code
stringlengths
3
1.01M
repo_name
stringlengths
5
116
path
stringlengths
3
311
language
stringclasses
30 values
license
stringclasses
15 values
size
int64
3
1.01M
/*
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
package org.apache.batik.extension.svg;

import java.awt.image.Raster;
import java.awt.image.WritableRaster;

import org.apache.batik.ext.awt.image.rendered.AbstractRed;
import org.apache.batik.ext.awt.image.rendered.CachableRed;

/**
 * A pass-through {@link CachableRed} that lazily builds a 256-bin luminance
 * histogram of its source image.  Pixel data is forwarded unchanged
 * ({@link #copyData}/{@link #getTile}); each tile is tallied into the
 * histogram at most once, the first time it is seen.
 *
 * @author <a href="mailto:vincent.hardy@eng.sun.com">Vincent Hardy</a>
 * @version $Id$
 */
public class HistogramRed extends AbstractRed {

    // This is used to track which tiles we have computed
    // a histogram for.  Indexed zero-based: x + y*numXTiles.
    boolean [] computed;
    int tallied = 0;

    // 256 luminance bins; luminance is (5*R + 9*G + 2*B) >> 4, range 0..255
    // for 8-bit samples.
    int [] bins = new int[256];

    public HistogramRed(CachableRed src){
        super(src, null);

        int tiles = getNumXTiles()*getNumYTiles();
        computed = new boolean[tiles];
    }

    /**
     * Folds one tile's pixels into {@link #bins}.
     * NOTE(review): the loop reads 3*w samples per row, i.e. it assumes the
     * source raster has exactly 3 bands (RGB) — confirm against callers.
     *
     * @param r tile raster to tally
     */
    public void tallyTile(Raster r) {
        final int minX = r.getMinX();
        final int minY = r.getMinY();
        final int w = r.getWidth();
        final int h = r.getHeight();

        int [] samples = null;
        int val;
        for (int y=minY; y<minY+h; y++) {
            samples = r.getPixels(minX, y, w, 1, samples);
            for (int x=0; x<3*w; x++) {
                // Simple fixed point conversion to luminance:
                // (5*R + 9*G + 2*B) / 16.
                val  = samples[x++]*5;   // Red
                val += samples[x++]*9;   // Green
                // BUG FIX: this read must NOT post-increment.  The original
                // used samples[x++] here as well; combined with the loop's
                // own x++ the index advanced by 4 per 3-sample pixel,
                // reading the wrong bands for every pixel after the first.
                val += samples[x]  *2;   // Blue
                bins[val>>4]++;
            }
        }
        tallied++;
    }

    /**
     * Returns the histogram over the whole source, tallying any tiles that
     * have not been visited yet.  Note the returned array is the internal
     * bin storage, not a copy.
     *
     * @return 256-element bin counts
     */
    public int [] getHistogram() {
        if (tallied == computed.length)
            return bins;

        CachableRed src = (CachableRed)getSources().get( 0 );
        int yt0 = src.getMinTileY();

        int xtiles = src.getNumXTiles();
        int xt0 = src.getMinTileX();
        for (int y=0; y<src.getNumYTiles(); y++) {
            for (int x=0; x<xtiles; x++) {
                // BUG FIX: index computed[] zero-based (x + y*xtiles), the
                // same scheme getTile() uses.  The original added xt0 here,
                // which double-counts the tile-grid origin and can index
                // past the end of computed[] when getMinTileX() != 0.
                int idx = x + y*xtiles;
                if (computed[idx]) continue;

                Raster r = src.getTile(x+xt0, y+yt0);
                tallyTile(r);
                computed[idx]=true;
            }
        }
        return bins;
    }

    /** Pass-through: copies source pixels into wr unchanged. */
    public WritableRaster copyData(WritableRaster wr) {
        copyToRaster(wr);
        return wr;
    }

    /**
     * Returns the source tile unchanged, tallying it into the histogram the
     * first time it is requested.
     */
    public Raster getTile(int tileX, int tileY) {
        int yt = tileY-getMinTileY();
        int xt = tileX-getMinTileX();

        CachableRed src = (CachableRed)getSources().get(0);
        Raster r = src.getTile(tileX, tileY);

        int idx = xt+yt*getNumXTiles();
        if (computed[idx])
            return r;

        tallyTile(r);
        computed[idx] = true;
        return r;
    }
}
Squeegee/batik
sources/org/apache/batik/extension/svg/HistogramRed.java
Java
apache-2.0
3,320
package com.fincatto.documentofiscal.nfe310.converters;

import org.junit.Assert;
import org.junit.Test;
import org.simpleframework.xml.stream.InputNode;
import org.simpleframework.xml.stream.NodeMap;
import org.simpleframework.xml.stream.Position;

// Unit test for NFStringNullToEmptyConverter: verifies that reading a node
// whose value is null yields the empty string rather than null.
public class NFStringNullToEmptyConverterTest {

    // "Should return an empty string when the value is null."
    @Test
    public void deveRetornarStringVaziaCasoSejaValorNulo() throws Exception {
        Assert.assertEquals("", new NFStringNullToEmptyConverter().read(new InputNodeMock()));
    }

    // Minimal InputNode stub: every accessor returns null/false, which is
    // exactly the "null value" case the converter must normalize to "".
    class InputNodeMock implements InputNode {

        @Override
        public String getName() {
            return null;
        }

        @Override
        public String getValue() {
            // The value under test: null, which the converter turns into "".
            return null;
        }

        @Override
        public boolean isRoot() {
            return false;
        }

        @Override
        public boolean isElement() {
            return false;
        }

        @Override
        public String getPrefix() {
            return null;
        }

        @Override
        public String getReference() {
            return null;
        }

        @Override
        public Position getPosition() {
            return null;
        }

        @Override
        public InputNode getAttribute(final String name) {
            return null;
        }

        @Override
        public NodeMap<InputNode> getAttributes() {
            return null;
        }

        @Override
        public InputNode getParent() {
            return null;
        }

        @Override
        public Object getSource() {
            return null;
        }

        @Override
        public InputNode getNext() {
            return null;
        }

        @Override
        public InputNode getNext(final String name) {
            return null;
        }

        @Override
        public void skip() {
            // Intentionally a no-op: nothing to skip in the stub.
        }

        @Override
        public boolean isEmpty() {
            return false;
        }
    }
}
fincatto/nfe
src/test/java/com/fincatto/documentofiscal/nfe310/converters/NFStringNullToEmptyConverterTest.java
Java
apache-2.0
1,927
//----------------------------------------------------------------------
// Copyright (c) Microsoft Open Technologies, Inc.
// All Rights Reserved
// Apache License 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//----------------------------------------------------------------------

using System.Net;

using Microsoft.IdentityModel.Clients.ActiveDirectory;

namespace Test.ADAL.WinRT.Unit
{
    /// <summary>
    /// Test-only factory that substitutes replayer-backed request/response
    /// wrappers for the real HTTP implementations.
    /// </summary>
    internal class ReplayerHttpWebRequestFactory : IHttpWebRequestFactory
    {
        /// <summary>Wraps the given URI in a replayer request.</summary>
        public IHttpWebRequest Create(string uri)
        {
            return new ReplayerHttpWebRequest(uri);
        }

        /// <summary>Wraps an existing response in a replayer response.</summary>
        public IHttpWebResponse CreateResponse(WebResponse response)
        {
            return new ReplayerHttpWebResponse(response);
        }
    }
}
LucVK/azure-activedirectory-library-for-dotnet
tests/Test.ADAL.WinRT.Unit/ReplayerHttpWebRequestFactory.cs
C#
apache-2.0
1,258
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.ui;

import com.intellij.codeInspection.ex.InspectionToolWrapper;
import com.intellij.codeInspection.reference.RefElement;
import com.intellij.codeInspection.reference.RefEntity;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import com.intellij.psi.NavigatablePsiElement;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;

/**
 * Small UI helpers shared by the inspection results view: editor disposal,
 * navigation for invalidated nodes, and placeholder labels.
 *
 * @author Dmitry Batkovich
 */
public class InspectionResultsViewUtil {
  /** Releases the editor unless it is null or already disposed. */
  static void releaseEditor(@Nullable Editor editor) {
    if (editor != null && !editor.isDisposed()) {
      EditorFactory.getInstance().releaseEditor(editor);
    }
  }

  /**
   * Builds a navigation target for a node whose element is no longer valid,
   * by walking up the owner chain to the nearest still-valid ref element.
   *
   * @return an {@link OpenFileDescriptor} pinned to the node's line when a
   *         document is available, the PSI element itself otherwise, or null
   *         when no valid navigatable owner exists.
   */
  @Nullable
  static Navigatable getNavigatableForInvalidNode(ProblemDescriptionNode node) {
    RefEntity element = node.getElement();
    // Climb owners until we find an entity that is still valid.
    while (element != null && !element.isValid()) {
      element = element.getOwner();
    }
    if (!(element instanceof RefElement)) return null;

    PsiElement containingElement = ((RefElement)element).getPsiElement();
    if (!(containingElement instanceof NavigatablePsiElement) || !containingElement.isValid()) return null;

    final int lineNumber = node.getLineNumber();
    if (lineNumber != -1) {
      final PsiFile containingFile = containingElement.getContainingFile();
      if (containingFile != null) {
        final VirtualFile file = containingFile.getVirtualFile();
        // BUG FIX: getVirtualFile() can return null (e.g. for non-physical
        // files); the original passed it straight to getDocument() which is
        // annotated @NotNull on its parameter.  Guard and fall through to
        // element navigation instead.
        if (file != null) {
          final Document document = FileDocumentManager.getInstance().getDocument(file);
          // Only use the recorded line if it still exists in the document.
          if (document != null && document.getLineCount() > lineNumber) {
            return new OpenFileDescriptor(containingElement.getProject(), file, lineNumber, 0);
          }
        }
      }
    }

    return (Navigatable)containingElement;
  }

  /** Placeholder label for an empty selection. */
  @NotNull
  static JLabel getNothingToShowTextLabel() {
    return createLabelForText(InspectionViewNavigationPanel.getTitleText(false));
  }

  /** Placeholder label shown when the selected entity has been invalidated. */
  @NotNull
  static JComponent getInvalidEntityLabel(@NotNull RefEntity entity) {
    final String name = entity.getName();
    return createLabelForText("\'" + name + "\' is no longer valid.");
  }

  /** Placeholder label shown when no preview can be rendered for the entity. */
  public static JComponent getPreviewIsNotAvailable(@NotNull RefEntity entity) {
    final String name = entity.getQualifiedName();
    return createLabelForText("Preview is not available for \'" + name + "\'.");
  }

  /** Progress label shown while a quick fix is being applied. */
  @NotNull
  static JComponent getApplyingFixLabel(@NotNull InspectionToolWrapper wrapper) {
    return createLabelForText("Applying quick fix for \'" + wrapper.getDisplayName() + "\'...");
  }

  /** Creates a top-aligned label with the view's standard insets. */
  @NotNull
  static JLabel createLabelForText(String text) {
    final JLabel multipleSelectionLabel = new JBLabel(text);
    multipleSelectionLabel.setVerticalAlignment(SwingConstants.TOP);
    multipleSelectionLabel.setBorder(JBUI.Borders.empty(16, 12, 0, 0));
    return multipleSelectionLabel;
  }
}
paplorinc/intellij-community
platform/lang-impl/src/com/intellij/codeInspection/ui/InspectionResultsViewUtil.java
Java
apache-2.0
3,816
<section class="hero-content"> <h1 class="hero-title">{{ landing.heroTitle }}</h1> </section> <section class="selling-points container clearfix"> <div class="point column third"> <span class="ion-music-note"></span> <h5 class="point-title">Choose your music</h5> <p class="point-description">The world is full of music; why should you have to listen to music that someone else chose?</p> </div> <div class="point column third"> <span class="ion-radio-waves"></span> <h5 class="point-title">Unlimited, streaming, ad-free</h5> <p class="point-description">No arbitrary limits. No distractions.</p> </div> <div class="point column third"> <span class="ion-iphone"></span> <h5 class="point-title">Mobile enabled</h5> <p class="point-description">Listen to your music on the go. This streaming service is available on all mobile platforms.</p> </div> </section>
kaseycolleen/bloc-jams-angular
dist/templates/landing.html
HTML
apache-2.0
980
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import static org.junit.Assert.*; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.Random; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.cache.AttributesFactory; import org.apache.geode.cache.Region; import org.apache.geode.cache.Scope; import org.apache.geode.test.dunit.ThreadUtils; import org.apache.geode.test.junit.categories.IntegrationTest; /** * This is a multi threaded tests. This test creates two regions. Region1 which is persistent and * Region2 which is not. There will be four sets of threads. One set doing put, second one doing * gets ,third one doing destroy(key) and the fourth one doing force rolling. * * The put are done for Integer Key objects which are random generated and whose values are between * 0-9 and the Integer value objects whose value can be between -99 to 99. 
* * Since the keys are only 0-9, this will ensure a high level of concurrency on the same thread * since there are more than 10 threads acting at the same time. * * After all the operations are done, the two regions are checked for equality. After that the * persistent region is closed and recreated so that it can recover the old values and again the two * regions are checked for equality. * This test is run for all modes persist, persist+overflow, * overflow only in syn and async mode. */ @Category(IntegrationTest.class) public class ConcurrentRegionOperationsJUnitTest extends DiskRegionTestingBase { private int numberOfPutsThreads = 5; private int numberOfGetsThreads = 4; private int numberOfDestroysThreads = 3; private int numberOfClearThreads = 2; protected int numberOfForceRollThreads = 3; /** * ms to run concurrent ops for before signalling them to stop */ protected int TIME_TO_RUN = 1000; private boolean exceptionOccurredInPuts = false; private boolean exceptionOccurredInGets = false; private boolean exceptionOccurredInDestroys = false; private boolean exceptionOccurredInClears = false; protected boolean exceptionOccurredInForceRolls = false; // if this test is to run for a longer time, make this true private static final boolean longTest = false; protected boolean failure = false; private boolean validate; protected Region region1; private Region region2; private Map<Integer, Lock> map = new ConcurrentHashMap<Integer, Lock>(); private static int counter = 0; @Override protected final void postSetUp() throws Exception { counter++; if (longTest) { TIME_TO_RUN = 10000; numberOfPutsThreads = 5; numberOfGetsThreads = 4; numberOfDestroysThreads = 3; numberOfClearThreads = 2; numberOfForceRollThreads = 3; } } @Test public void testPersistSyncConcurrency() { this.validate = true; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); region1 = 
DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache, p, Scope.LOCAL); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache, p, Scope.LOCAL); validate(region1, region2); } @Test public void testPersistAsyncConcurrency() { this.validate = true; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setBytesThreshold(0); p.setTimeInterval(1); region1 = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, p); validate(region1, region2); } @Test public void testPersistAsyncSmallQueueConcurrency() { this.validate = true; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setBytesThreshold(1); p.setTimeInterval(0); region1 = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, p); validate(region1, region2); } @Test public void testPersistAndOverflowSyncConcurrency() { this.validate = true; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setOverFlowCapacity(1); region1 = DiskRegionHelperFactory.getSyncOverFlowAndPersistRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getSyncOverFlowAndPersistRegion(cache, p); validate(region1, region2); } @Test public void testPersistAndOverflowAsyncConcurrency() { this.validate = true; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setBytesThreshold(0); p.setTimeInterval(1); p.setOverFlowCapacity(1); region1 = 
DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, p); validate(region1, region2); } @Test public void testPersistAndOverflowAsyncSmallQueueConcurrency() { this.validate = true; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setBytesThreshold(1); p.setTimeInterval(0); p.setOverFlowCapacity(1); region1 = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, p); validate(region1, region2); } @Test public void testNVPersistSyncConcurrency() { this.validate = false; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); region1 = DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache, p, Scope.LOCAL); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache, p, Scope.LOCAL); validate(region1, region2); } @Test public void testNVPersistAsyncConcurrency() { this.validate = false; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setBytesThreshold(0); p.setTimeInterval(1); region1 = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, p); validate(region1, region2); } @Test public void testNVPersistAsyncSmallQueueConcurrency() { this.validate = false; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setBytesThreshold(1); p.setTimeInterval(0); region1 = 
DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getAsyncPersistOnlyRegion(cache, p); validate(region1, region2); } @Test public void testNVPersistAndOverflowSyncConcurrency() { this.validate = false; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(100); p.setOverFlowCapacity(1); region1 = DiskRegionHelperFactory.getSyncOverFlowAndPersistRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getSyncOverFlowAndPersistRegion(cache, p); validate(region1, region2); } @Test public void testNVPersistAndOverflowAsyncConcurrency() { this.validate = false; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setBytesThreshold(0); p.setTimeInterval(1); p.setOverFlowCapacity(1); region1 = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, p); validate(region1, region2); } @Test public void testNVPersistAndOverflowAsyncSmallQueueConcurrency() { this.validate = false; DiskRegionProperties p = new DiskRegionProperties(); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setCompactionThreshold(99); p.setBytesThreshold(1); p.setTimeInterval(0); p.setOverFlowCapacity(1); region1 = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, p); region2 = concurrencyTest(region1); region1 = DiskRegionHelperFactory.getAsyncOverFlowAndPersistRegion(cache, p); validate(region1, region2); } /** * Tests the bug where a get operation on an evicted entry fails to get value as the oplog is * deleted by the roller, but the entry was not rolled. 
*/ @Test public void testBug35048() { DiskRegionProperties p = new DiskRegionProperties(); p.setMaxOplogSize(1000); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setOverflow(true); p.setSynchronous(false); p.setOverFlowCapacity(5); p.setRolling(true); byte[] val = new byte[50]; region = DiskRegionHelperFactory.getAsyncOverFlowOnlyRegion(cache, p); for (int j = 1; j < 6; ++j) { region.put("" + j, val); } // This will overflow the first entry to disk. Its OplogKeyID will be now // positive & value in VM is null region.put("" + 6, val); // Do a get opeartion on entry which has been overflown to disk. The value // in VM is now not null, but its Oplog KeyID is positive region.get("" + 1); // Do a force roll region.forceRolling(); // Do a get on entries starting from 2 to 6 & then 1. This will ensure that // the entry 1 gets evicted // from memory but is not written to disk as its Oplog KeyId is positive. // then do a get operation on entry with key as 1. The vale should be // correctly retrieved from // Htree implying the roller has rolled the entry correctly try { region.get("" + 2); region.get("" + 3); region.get("" + 4); region.get("" + 5); region.get("" + 6); try { region.get("" + 1); } catch (Exception e) { logWriter.severe("Exception occurred ", e); fail( "Failed to retrieve value from disk as the Oplog has been rolled but entry still references the Oplog."); } } catch (Exception e) { logWriter.severe("Exception occurred ", e); fail("Test failed because of unexpected exception"); } // Now force roll the oplog so that the data exists in the Oplog which has // been deleted } @Test public void testConcurrentForceRollingAndGetOperation() { DiskRegionProperties p = new DiskRegionProperties(); p.setMaxOplogSize(1000); p.setRegionName(this.getName() + counter); p.setDiskDirs(dirs); p.setOverflow(true); p.setSynchronous(false); p.setOverFlowCapacity(5); p.setRolling(true); byte[] val = new byte[50]; region = 
DiskRegionHelperFactory.getAsyncOverFlowOnlyRegion(cache, p); for (int j = 1; j < 101; ++j) { region.put("" + j, val); } Thread t1 = new Thread(new Runnable() { public void run() { for (int i = 0; i < 100; ++i) { region.forceRolling(); try { Thread.sleep(TIME_TO_RUN / 100); } catch (InterruptedException e) { fail("interrupted"); } } } }); Thread t2 = new Thread(new Runnable() { public void run() { try { for (int i = 0; i < 100; ++i) { for (int j = 1; j < 101; ++j) { region.get("" + j); } } } catch (Exception e) { e.printStackTrace(); failure = true; } } }); t1.start(); t2.start(); ThreadUtils.join(t1, 30 * 1000); ThreadUtils.join(t2, 30 * 1000); assertTrue(!failure); } private final AtomicBoolean timeToStop = new AtomicBoolean(); private boolean isItTimeToStop() { return this.timeToStop.get(); } private CyclicBarrier startLine; private void waitForAllStartersToBeReady() { try { startLine.await(); } catch (InterruptedException ie) { throw new AssertionError("unexpected ", ie); } catch (BrokenBarrierException ex) { throw new AssertionError("unexpected ", ex); } } private Region concurrencyTest(Region r1) { if (this.validate) { for (int i = 0; i < 10; i++) { map.put(Integer.valueOf(i), new ReentrantLock()); } region2 = cache.createVMRegion("testRegion2", new AttributesFactory().createRegionAttributes()); } this.startLine = new CyclicBarrier(numberOfPutsThreads + numberOfGetsThreads + numberOfDestroysThreads + numberOfClearThreads + numberOfForceRollThreads); DoesPuts doesPuts = new DoesPuts(); DoesGets doesGets = new DoesGets(); DoesDestroy doesDestroy = new DoesDestroy(); DoesClear doesClear = new DoesClear(); DoesForceRoll doesForceRoll = new DoesForceRoll(); Thread[] putThreads = new Thread[numberOfPutsThreads]; Thread[] getThreads = new Thread[numberOfGetsThreads]; Thread[] destroyThreads = new Thread[numberOfDestroysThreads]; Thread[] clearThreads = new Thread[numberOfClearThreads]; Thread[] forceRollThreads = new Thread[numberOfForceRollThreads]; for (int i = 0; 
i < numberOfPutsThreads; i++) { putThreads[i] = new Thread(doesPuts); putThreads[i].setName("PutThread" + i); } for (int i = 0; i < numberOfGetsThreads; i++) { getThreads[i] = new Thread(doesGets); getThreads[i].setName("GetThread" + i); } for (int i = 0; i < numberOfDestroysThreads; i++) { destroyThreads[i] = new Thread(doesDestroy); destroyThreads[i].setName("DelThread" + i); } for (int i = 0; i < numberOfClearThreads; i++) { clearThreads[i] = new Thread(doesClear); clearThreads[i].setName("ClearThread" + i); } for (int i = 0; i < numberOfForceRollThreads; i++) { forceRollThreads[i] = new Thread(doesForceRoll); forceRollThreads[i].setName("ForceRoll" + i); } this.timeToStop.set(false); try { for (int i = 0; i < numberOfPutsThreads; i++) { putThreads[i].start(); } for (int i = 0; i < numberOfGetsThreads; i++) { getThreads[i].start(); } for (int i = 0; i < numberOfDestroysThreads; i++) { destroyThreads[i].start(); } for (int i = 0; i < numberOfClearThreads; i++) { clearThreads[i].start(); } for (int i = 0; i < numberOfForceRollThreads; i++) { forceRollThreads[i].start(); } try { Thread.sleep(TIME_TO_RUN); } catch (InterruptedException e) { fail("interrupted"); } } finally { this.timeToStop.set(true); } for (int i = 0; i < numberOfPutsThreads; i++) { ThreadUtils.join(putThreads[i], 60 * 1000); } for (int i = 0; i < numberOfGetsThreads; i++) { ThreadUtils.join(getThreads[i], 60 * 1000); } for (int i = 0; i < numberOfDestroysThreads; i++) { ThreadUtils.join(destroyThreads[i], 60 * 1000); } for (int i = 0; i < numberOfClearThreads; i++) { ThreadUtils.join(clearThreads[i], 60 * 1000); } for (int i = 0; i < numberOfForceRollThreads; i++) { ThreadUtils.join(forceRollThreads[i], 60 * 1000); } if (this.validate) { Collection entrySet = region2.entrySet(); Iterator iterator = entrySet.iterator(); Map.Entry mapEntry = null; Object key, value = null; ((LocalRegion) r1).getDiskRegion().forceFlush(); while (iterator.hasNext()) { mapEntry = (Map.Entry) iterator.next(); key = 
mapEntry.getKey(); value = mapEntry.getValue(); if (!(r1.containsKey(key))) { fail(" region1 does not contain Key " + key + " but was expected to be there"); } if (!(((LocalRegion) r1).getValueOnDisk(key).equals(value))) { fail(" value for key " + key + " is " + ((LocalRegion) r1).getValueOnDisk(key) + " which is not consistent, it is supposed to be " + value); } } } r1.close(); if (exceptionOccurredInDestroys) { fail("Exception occurred while destroying"); } if (exceptionOccurredInClears) { fail("Exception occurred while clearing"); } if (exceptionOccurredInForceRolls) { fail("Exception occurred while force Rolling"); } if (exceptionOccurredInGets) { fail("Exception occurred while doing gets"); } if (exceptionOccurredInPuts) { fail("Exception occurred while doing puts"); } return region2; } void validate(Region r1, Region r2) { if (!this.validate) return; Collection entrySet = r2.entrySet(); Iterator iterator = entrySet.iterator(); Map.Entry mapEntry = null; Object key, value = null; while (iterator.hasNext()) { mapEntry = (Map.Entry) iterator.next(); key = mapEntry.getKey(); value = mapEntry.getValue(); if (!(r1.containsKey(key))) { fail(" region1 does not contain Key " + key + " but was expected to be there"); } if (!(r1.get(key).equals(value))) { fail(" value for key " + key + " is " + r1.get(key) + " which is not consistent, it is supposed to be " + value); } } assertEquals(r2.size(), r1.size()); r1.destroyRegion(); r2.destroyRegion(); } private Random random = new Random(); void put() { int randomInt1 = random.nextInt() % 10; if (randomInt1 < 0) { randomInt1 = randomInt1 * (-1); } int randomInt2 = random.nextInt() % 100; Integer integer1 = Integer.valueOf(randomInt1); Integer integer2 = Integer.valueOf(randomInt2); Object v = null; Object expected = null; Lock lock = null; if (this.validate) { lock = map.get(integer1); lock.lock(); } try { try { v = region1.put(integer1, integer2); if (this.validate) { expected = region2.put(integer1, integer2); } } catch 
(Exception e) { exceptionOccurredInPuts = true; logWriter.severe("Exception occurred in puts ", e); fail(" failed during put due to " + e); } } finally { if (lock != null) { lock.unlock(); } } if (this.validate) { if (v != null) { assertEquals(expected, v); } } } void get() { int randomInt1 = random.nextInt() % 10; if (randomInt1 < 0) { randomInt1 = randomInt1 * (-1); } Integer integer1 = Integer.valueOf(randomInt1); Object v = null; Object expected = null; Lock lock = null; if (this.validate) { lock = map.get(integer1); lock.lock(); } try { try { v = region1.get(integer1); if (this.validate) { expected = region2.get(integer1); } } catch (Exception e) { exceptionOccurredInGets = true; logWriter.severe("Exception occurred in get ", e); throw new AssertionError(" failed during get due to ", e); } } finally { if (lock != null) { lock.unlock(); } } if (this.validate) { assertEquals(expected, v); } } void destroy() { Exception exceptionOccurred1 = null; Exception exceptionOccurred2 = null; int randomInt1 = random.nextInt() % 10; if (randomInt1 < 0) { randomInt1 = randomInt1 * (-1); } Integer integer1 = Integer.valueOf(randomInt1); Object v = null; Object expected = null; Lock lock = null; if (this.validate) { lock = map.get(integer1); lock.lock(); } try { try { v = region1.destroy(integer1); } catch (Exception e) { exceptionOccurred1 = e; } if (this.validate) { try { expected = region2.destroy(integer1); } catch (Exception e) { exceptionOccurred2 = e; } if ((exceptionOccurred1 != null) ^ (exceptionOccurred2 != null)) { exceptionOccurredInDestroys = true; logWriter.severe("Exception occurred in destroy ex1=" + exceptionOccurred1 + " ex2=" + exceptionOccurred2); fail("Exception occurred in destroy"); } } } finally { if (lock != null) { lock.unlock(); } } if (this.validate) { if (v != null) { assertEquals(expected, v); } } } void clear() { if (this.validate) { return; // can't do clear and validate } try { region1.clear(); } catch (Exception e) { exceptionOccurredInClears 
= true; logWriter.severe("Exception occurred in clear=", e); throw new AssertionError("Exception occurred in clear", e); } } /** * Bug Test for bug # 35139. This bug was occuring because a clear & region destroy operation * occurred near concurrently. The region destroy operation notified the roller thread to stop & * then it joined with the roller . But by that time clear operation created a new instance of * roller thread ( because a clear operation stop/starts the roller) & the destroy operation * actually joined with the new thread ( different from the one on which notification was issued * to exit). */ @Test public void testConcurrentClearAndRegionDestroyBug() { DiskRegionProperties p = new DiskRegionProperties(); p.setMaxOplogSize(10000); p.setOverflow(false); p.setSynchronous(true); p.setOverFlowCapacity(5); p.setRolling(true); byte[] val = new byte[8000]; region = DiskRegionHelperFactory.getSyncPersistOnlyRegion(cache, p, Scope.LOCAL); region.put("key1", val); DiskStoreImpl dimpl = ((LocalRegion) region).getDiskStore(); LocalRegion.ISSUE_CALLBACKS_TO_CACHE_OBSERVER = true; final Thread th = new Thread(new Runnable() { public void run() { region.destroyRegion(); } }); DiskStoreImpl.DEBUG_DELAY_JOINING_WITH_COMPACTOR = 8000; CacheObserver old = CacheObserverHolder.setInstance(new CacheObserverAdapter() { boolean skip = false; public void beforeStoppingCompactor() { if (!skip) { skip = true; th.setPriority(9); th.start(); Thread.yield(); } } } ); region.clear(); ThreadUtils.join(th, 20 * 1000); LocalRegion.ISSUE_CALLBACKS_TO_CACHE_OBSERVER = false; DiskStoreImpl.DEBUG_DELAY_JOINING_WITH_COMPACTOR = 500; CacheObserverHolder.setInstance(old); } @SuppressWarnings("synthetic-access") class DoesPuts implements Runnable { public void run() { waitForAllStartersToBeReady(); while (!isItTimeToStop()) { put(); } } } @SuppressWarnings("synthetic-access") class DoesGets implements Runnable { public void run() { waitForAllStartersToBeReady(); while (!isItTimeToStop()) { 
get(); } } } @SuppressWarnings("synthetic-access") class DoesDestroy implements Runnable { public void run() { waitForAllStartersToBeReady(); while (!isItTimeToStop()) { destroy(); } } } @SuppressWarnings("synthetic-access") class DoesClear implements Runnable { public void run() { waitForAllStartersToBeReady(); while (!isItTimeToStop()) { try { Thread.sleep(TIME_TO_RUN / 100); } catch (InterruptedException e) { fail("interrupted"); } clear(); } } } @SuppressWarnings("synthetic-access") class DoesForceRoll implements Runnable { public void run() { waitForAllStartersToBeReady(); while (!isItTimeToStop()) { try { Thread.sleep(20); } catch (InterruptedException e) { fail("interrupted"); } forceRoll(); } } private void forceRoll() { try { region1.forceRolling(); } catch (Exception e) { exceptionOccurredInForceRolls = true; logWriter.severe("Exception occurred in forceRolling ", e); throw new AssertionError(" Exception occurred here", e); } } } }
pivotal-amurmann/geode
geode-core/src/test/java/org/apache/geode/internal/cache/ConcurrentRegionOperationsJUnitTest.java
Java
apache-2.0
26,129
require 'formula'

# NOTE: When updating Wine, please check Wine-Gecko and Wine-Mono for updates
# too:
#  - http://wiki.winehq.org/Gecko
#  - http://wiki.winehq.org/Mono

# Homebrew formula for Wine, the Windows-compatibility layer.
class Wine < Formula
  homepage 'https://www.winehq.org/'

  stable do
    url 'https://downloads.sourceforge.net/project/wine/Source/wine-1.6.2.tar.bz2'
    sha256 'f0ab9eede5a0ccacbf6e50682649f9377b9199e49cf55641f1787cf72405acbe'

    resource 'gecko' do
      url 'https://downloads.sourceforge.net/wine/wine_gecko-2.21-x86.msi', :using => :nounzip
      sha1 'a514fc4d53783a586c7880a676c415695fe934a3'
    end

    resource 'mono' do
      url 'https://downloads.sourceforge.net/wine/wine-mono-0.0.8.msi', :using => :nounzip
      sha256 '3dfc23bbc29015e4e538dab8b83cb825d3248a0e5cf3b3318503ee7331115402'
    end
  end

  bottle do
    sha1 "348f15e19880888d19d04d2fe4bad42048fe6828" => :yosemite
    sha1 "69f05602ecde44875cf26297871186aaa0b26cd7" => :mavericks
    sha1 "a89371854006687b74f4446a52ddb1f68cfafa7e" => :mountain_lion
  end

  devel do
    url "https://downloads.sourceforge.net/project/wine/Source/wine-1.7.37.tar.bz2"
    sha256 "6730ec79bc8d5f61ab90d9cb51daab26a57c1a79e2804e03731f060dea4af305"

    depends_on "samba" => :optional
    depends_on "gnutls"

    # Patch to fix screen-flickering issues. Still relevant on 1.7.23.
    # https://bugs.winehq.org/show_bug.cgi?id=34166
    patch do
      url "https://bugs.winehq.org/attachment.cgi?id=47639"
      sha1 "c195f4b9c0af450c7dc3f396e8661ea5248f2b01"
    end
  end

  head do
    url "git://source.winehq.org/git/wine.git"
    depends_on "samba" => :optional
    option "with-win64", "Build with win64 emulator (won't run 32-bit binaries.)"
  end

  # note that all wine dependencies should declare a --universal option in their formula,
  # otherwise homebrew will not notice that they are not built universal
  def require_universal_deps?
    true
  end

  # Wine will build both the Mac and the X11 driver by default, and you can switch
  # between them. But if you really want to build without X11, you can.
  depends_on :x11 => :recommended
  depends_on 'pkg-config' => :build
  depends_on 'freetype'
  depends_on 'jpeg'
  depends_on 'libgphoto2'
  depends_on 'little-cms2'
  depends_on 'libicns'
  depends_on 'libtiff'
  depends_on 'sane-backends'
  depends_on 'libgsm' => :optional

  resource 'gecko' do
    url 'https://downloads.sourceforge.net/wine/wine_gecko-2.34-x86.msi', :using => :nounzip
    sha256 '956c26bf302b1864f4d7cb6caee4fc83d4c1281157731761af6395b876e29ca7'
  end

  resource 'mono' do
    url 'https://downloads.sourceforge.net/wine/wine-mono-4.5.4.msi', :using => :nounzip
    sha256 '20bced7fee01f25279edf07670c5033d25c2c9834a839e7a20410ce1c611d6f2'
  end

  fails_with :llvm do
    build 2336
    cause 'llvm-gcc does not respect force_align_arg_pointer'
  end

  fails_with :clang do
    build 425
    cause "Clang prior to Xcode 5 miscompiles some parts of wine"
  end

  # These libraries are not specified as dependencies, or not built as 32-bit:
  # configure: libv4l, gstreamer-0.10, libcapi20, libgsm

  # Wine loads many libraries lazily using dlopen calls, so it needs these paths
  # to be searched by dyld.
  # Including /usr/lib because wine, as of 1.3.15, tries to dlopen
  # libncurses.5.4.dylib, and fails to find it without the fallback path.
  def library_path
    paths = %W[#{HOMEBREW_PREFIX}/lib /usr/lib]
    paths.unshift(MacOS::X11.lib) if build.with? 'x11'
    paths.join(':')
  end

  # Shell wrapper installed as `wine`; sets the dyld fallback path and then
  # executes the real binary (renamed wine.bin in install below).
  def wine_wrapper; <<-EOS.undent
    #!/bin/sh
    DYLD_FALLBACK_LIBRARY_PATH="#{library_path}" "#{bin}/wine.bin" "$@"
    EOS
  end

  def install
    ENV.m32 # Build 32-bit; Wine doesn't support 64-bit host builds on OS X.

    # Help configure find libxml2 in an XCode only (no CLT) installation.
    ENV.libxml2

    args = ["--prefix=#{prefix}"]
    args << "--disable-win16" if MacOS.version <= :leopard
    args << "--enable-win64" if build.with? "win64"

    # 64-bit builds of mpg123 are incompatible with 32-bit builds of Wine
    args << "--without-mpg123" if Hardware.is_64_bit?

    args << "--without-x" if build.without? 'x11'

    system "./configure", *args

    # The Mac driver uses blocks and must be compiled with an Apple compiler
    # even if the rest of Wine is built with a GNU compiler.
    unless ENV.compiler == :clang || ENV.compiler == :llvm || ENV.compiler == :gcc
      system "make", "dlls/winemac.drv/Makefile"
      inreplace "dlls/winemac.drv/Makefile" do |s|
        # We need to use the real compiler, not the superenv shim, which will exec the
        # configured compiler no matter what name is used to invoke it.
        cc, cxx = s.get_make_var("CC"), s.get_make_var("CXX")
        s.change_make_var! "CC", cc.sub(ENV.cc, "xcrun clang") if cc
        # FIX: previously substituted on `cc`, which rewrote CXX from the CC
        # value; the C++ compiler variable must be derived from `cxx`.
        s.change_make_var! "CXX", cxx.sub(ENV.cxx, "xcrun clang++") if cxx

        # Emulate some things that superenv would normally handle for us

        # We're configured to use GNU GCC, so remove an unsupported flag
        s.gsub! "-gstabs+", ""

        # Pass the sysroot to support Xcode-only systems
        cflags = s.get_make_var("CFLAGS")
        cflags += " --sysroot=#{MacOS.sdk_path}"
        s.change_make_var! "CFLAGS", cflags
      end
    end

    system "make install"
    (share/'wine/gecko').install resource('gecko')
    (share/'wine/mono').install resource('mono')

    # Use a wrapper script, so rename wine to wine.bin
    # and name our startup script wine
    mv bin/'wine', bin/'wine.bin'
    (bin/'wine').write(wine_wrapper)

    # Don't need Gnome desktop support
    (share/'applications').rmtree
  end

  def caveats
    s = <<-EOS.undent
      You may want to get winetricks:
        brew install winetricks

      The current version of Wine contains a partial implementation of dwrite.dll
      which may cause text rendering issues in applications such as Steam.
      We recommend that you run winecfg, add an override for dwrite in the
      Libraries tab, and edit the override mode to "disable". See:
        https://bugs.winehq.org/show_bug.cgi?id=31374
    EOS
    if build.with? 'x11'
      # FIX: backslashes must be escaped in the interpolating heredoc, otherwise
      # Ruby drops them and the caveat prints "HKCUSoftwareWineDrivers".
      s += <<-EOS.undent
        By default Wine uses a native Mac driver. To switch to the X11 driver, use
        regedit to set the "graphics" key under "HKCU\\Software\\Wine\\Drivers" to
        "x11" (or use winetricks).

        For best results with X11, install the latest version of XQuartz:
          https://xquartz.macosforge.org/
      EOS
    end
    return s
  end
end
jasonm23/homebrew
Library/Formula/wine.rb
Ruby
bsd-2-clause
6,499
/* * The Minimal snprintf() implementation * * Copyright (c) 2013 Michal Ludvig <michal@logix.cz> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the auhor nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef __MINI_PRINTF__ #define __MINI_PRINTF__ #include <stdarg.h> int mini_vsnprintf(char* buffer, unsigned int buffer_len, char *fmt, va_list va); int mini_snprintf(char* buffer, unsigned int buffer_len, char *fmt, ...); #define vsnprintf mini_vsnprintf #define snprintf mini_snprintf #endif
techno/mist32e-demos
mini-printf/mini-printf.h
C
bsd-2-clause
1,922
/*- * Copyright (c) 2006 Verdens Gang AS * Copyright (c) 2006-2014 Varnish Software AS * All rights reserved. * * Author: Poul-Henning Kamp <phk@phk.freebsd.dk> * Author: Martin Blix Grydeland <martin@varnish-software.com> * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
*/ #include "config.h" #include <sys/mman.h> #include <sys/stat.h> #include <sys/types.h> #include <errno.h> #include <fcntl.h> #include <stdarg.h> #include <stdint.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include "miniobj.h" #include "vas.h" #include "vapi/vsm.h" #include "vapi/vsm_int.h" #include "vtim.h" #include "vin.h" #include "vsb.h" #include "vsm_api.h" #ifndef MAP_HASSEMAPHORE #define MAP_HASSEMAPHORE 0 /* XXX Linux */ #endif /*--------------------------------------------------------------------*/ struct VSM_data * VSM_New(void) { struct VSM_data *vd; ALLOC_OBJ(vd, VSM_MAGIC); if (vd == NULL) return (vd); REPLACE(vd->name, ""); vd->vsm_fd = -1; CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); return (vd); } /*--------------------------------------------------------------------*/ int vsm_diag(struct VSM_data *vd, const char *fmt, ...) { va_list ap; CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); AN(fmt); if (vd->diag == NULL) vd->diag = VSB_new_auto(); AN(vd->diag); VSB_clear(vd->diag); va_start(ap, fmt); VSB_vprintf(vd->diag, fmt, ap); va_end(ap); AZ(VSB_finish(vd->diag)); return (-1); } /*--------------------------------------------------------------------*/ const char * VSM_Error(const struct VSM_data *vd) { CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); if (vd->diag == NULL) return (NULL); else return (VSB_data(vd->diag)); } /*--------------------------------------------------------------------*/ void VSM_ResetError(struct VSM_data *vd) { CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); if (vd->diag == NULL) return; VSB_delete(vd->diag); vd->diag = NULL; } /*--------------------------------------------------------------------*/ int VSM_n_Arg(struct VSM_data *vd, const char *arg) { char *name = NULL; char *fname = NULL; CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); if (vd->head) return (vsm_diag(vd, "VSM_n_Arg: Already open\n")); if (VIN_N_Arg(arg, &name, NULL, &fname)) return (vsm_diag(vd, "Invalid instance name: %s\n", strerror(errno))); AN(name); AN(fname); if (vd->name) 
free(vd->name); vd->name = name; if (vd->fname) free(vd->fname); vd->fname = fname; vd->N_opt = 0; return (1); } /*--------------------------------------------------------------------*/ int VSM_N_Arg(struct VSM_data *vd, const char *arg) { CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); AN(arg); if (vd->head) return (vsm_diag(vd, "VSM_N_Arg: Already open\n")); REPLACE(vd->name, arg); REPLACE(vd->fname, arg); vd->N_opt = 1; return (1); } /*--------------------------------------------------------------------*/ const char * VSM_Name(const struct VSM_data *vd) { CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); return (vd->name); } /*--------------------------------------------------------------------*/ void VSM_Delete(struct VSM_data *vd) { CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); VSM_Close(vd); if (vd->vsc != NULL) VSC_Delete(vd); VSM_ResetError(vd); free(vd->name); free(vd->fname); FREE_OBJ(vd); } /*-------------------------------------------------------------------- * The internal VSM open function * * Return: * 0 = success * <0 = failure * */ /*--------------------------------------------------------------------*/ int VSM_Open(struct VSM_data *vd) { int i; struct VSM_head slh; void *v; CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); if (vd->head != NULL) /* Already open */ return (0); if (vd->fname == NULL) { /* Use default (hostname) */ i = VSM_n_Arg(vd, ""); if (i < 0) return (i); AN(vd->fname); } vd->vsm_fd = open(vd->fname, O_RDONLY); if (vd->vsm_fd < 0) return (vsm_diag(vd, "Cannot open %s: %s\n", vd->fname, strerror(errno))); AZ(fstat(vd->vsm_fd, &vd->fstat)); if (!S_ISREG(vd->fstat.st_mode)) { AZ(close(vd->vsm_fd)); vd->vsm_fd = -1; return (vsm_diag(vd, "%s is not a regular file\n", vd->fname)); } i = read(vd->vsm_fd, &slh, sizeof slh); if (i != sizeof slh) { AZ(close(vd->vsm_fd)); vd->vsm_fd = -1; return(vsm_diag(vd, "Cannot read %s: %s\n", vd->fname, strerror(errno))); } if (memcmp(slh.marker, VSM_HEAD_MARKER, sizeof slh.marker)) { AZ(close(vd->vsm_fd)); vd->vsm_fd = -1; return (vsm_diag(vd, "Not a VSM 
file %s\n", vd->fname)); } if (!vd->N_opt && slh.alloc_seq == 0) { AZ(close(vd->vsm_fd)); vd->vsm_fd = -1; return (vsm_diag(vd, "Abandoned VSM file (Varnish not running?) %s\n", vd->fname)); } v = mmap(NULL, slh.shm_size, PROT_READ, MAP_SHARED|MAP_HASSEMAPHORE, vd->vsm_fd, 0); if (v == MAP_FAILED) { AZ(close(vd->vsm_fd)); vd->vsm_fd = -1; return (vsm_diag(vd, "Cannot mmap %s: %s\n", vd->fname, strerror(errno))); } vd->head = v; vd->b = v; vd->e = vd->b + slh.shm_size; vd->age_ok = vd->head->age; vd->t_ok = VTIM_mono(); return (0); } /*--------------------------------------------------------------------*/ void VSM_Close(struct VSM_data *vd) { CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); if (vd->head == NULL) return; assert(vd->vsm_fd >= 0); AZ(munmap((void*)vd->b, vd->e - vd->b)); vd->b = NULL; vd->e = NULL; vd->head = NULL; AZ(close(vd->vsm_fd)); vd->vsm_fd = -1; } /*--------------------------------------------------------------------*/ int VSM_Abandoned(struct VSM_data *vd) { struct stat st; double now; CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); if (vd->head == NULL) /* Not open */ return (1); if (vd->N_opt) /* No abandonment check should be done */ return (0); if (!vd->head->alloc_seq) /* Flag of abandonment set by mgt */ return (1); if (vd->head->age < vd->age_ok) /* Age going backwards */ return (1); now = VTIM_mono(); if (vd->head->age == vd->age_ok && now - vd->t_ok > 2.) 
{ /* No age change for 2 seconds, stat the file */ if (stat(vd->fname, &st)) return (1); if (st.st_dev != vd->fstat.st_dev) return (1); if (st.st_ino != vd->fstat.st_ino) return (1); vd->t_ok = now; } else if (vd->head->age > vd->age_ok) { /* It is aging, update timestamps */ vd->t_ok = now; vd->age_ok = vd->head->age; } return (0); } /*--------------------------------------------------------------------*/ void VSM__iter0(const struct VSM_data *vd, struct VSM_fantom *vf) { CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); AN(vf); memset(vf, 0, sizeof *vf); } int VSM__itern(const struct VSM_data *vd, struct VSM_fantom *vf) { struct VSM_chunk *c = NULL; CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); AN(vf); if (!vd->head) return (0); /* Not open */ if (!vd->N_opt && vd->head->alloc_seq == 0) return (0); /* abandoned VSM */ else if (vf->chunk != NULL) { /* get next chunk */ if (!vd->N_opt && vf->priv != vd->head->alloc_seq) return (0); /* changes during iteration */ if (vf->chunk->len == 0) return (0); /* free'd during iteration */ if (vf->chunk->next == 0) return (0); /* last */ c = (struct VSM_chunk *)(void*)(vd->b + vf->chunk->next); assert(c != vf->chunk); } else if (vd->head->first == 0) { return (0); /* empty vsm */ } else { /* get first chunk */ AZ(vf->chunk); c = (struct VSM_chunk *)(void*)(vd->b + vd->head->first); } AN(c); if (memcmp(c->marker, VSM_CHUNK_MARKER, sizeof c->marker)) return (0); /* XXX - assert? 
*/ vf->chunk = c; vf->priv = vd->head->alloc_seq; vf->b = (void*)(vf->chunk + 1); vf->e = (char*)vf->b + vf->chunk->len; strncpy(vf->class, vf->chunk->class, sizeof vf->class); vf->class[sizeof vf->class - 1] = '\0'; strncpy(vf->type, vf->chunk->type, sizeof vf->type); vf->type[sizeof vf->type - 1] = '\0'; strncpy(vf->ident, vf->chunk->ident, sizeof vf->ident); vf->ident[sizeof vf->ident - 1] = '\0'; return (1); } /*--------------------------------------------------------------------*/ enum VSM_valid_e VSM_StillValid(const struct VSM_data *vd, struct VSM_fantom *vf) { struct VSM_fantom f2; CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); AN(vf); if (!vd->head) return (VSM_invalid); if (!vd->N_opt && !vd->head->alloc_seq) return (VSM_invalid); if (vf->chunk == NULL) return (VSM_invalid); if (vf->priv == vd->head->alloc_seq) return (VSM_valid); VSM_FOREACH(&f2, vd) { if (f2.chunk != vf->chunk || f2.b != vf->b || f2.e != vf->e) continue; if (strcmp(f2.class, vf->class)) continue; if (strcmp(f2.type, vf->type)) continue; if (strcmp(f2.ident, vf->ident)) continue; vf->priv = vd->head->alloc_seq; return (VSM_similar); } return (VSM_invalid); } int VSM_Get(const struct VSM_data *vd, struct VSM_fantom *vf, const char *class, const char *type, const char *ident) { CHECK_OBJ_NOTNULL(vd, VSM_MAGIC); VSM_FOREACH(vf, vd) { if (strcmp(vf->class, class)) continue; if (type != NULL && strcmp(vf->type, type)) continue; if (ident != NULL && strcmp(vf->ident, ident)) continue; return (1); } memset(vf, 0, sizeof *vf); return (0); }
adeelshahid/Varnish-Cache
lib/libvarnishapi/vsm.c
C
bsd-2-clause
10,283
""" Statistics for astronomy """ import numpy as np from scipy.stats.distributions import rv_continuous def bivariate_normal(mu=[0, 0], sigma_1=1, sigma_2=1, alpha=0, size=None, return_cov=False): """Sample points from a 2D normal distribution Parameters ---------- mu : array-like (length 2) The mean of the distribution sigma_1 : float The unrotated x-axis width sigma_2 : float The unrotated y-axis width alpha : float The rotation counter-clockwise about the origin size : tuple of ints, optional Given a shape of, for example, ``(m,n,k)``, ``m*n*k`` samples are generated, and packed in an `m`-by-`n`-by-`k` arrangement. Because each sample is `N`-dimensional, the output shape is ``(m,n,k,N)``. If no shape is specified, a single (`N`-D) sample is returned. return_cov : boolean, optional If True, return the computed covariance matrix. Returns ------- out : ndarray The drawn samples, of shape *size*, if that was provided. If not, the shape is ``(N,)``. In other words, each entry ``out[i,j,...,:]`` is an N-dimensional value drawn from the distribution. cov : ndarray The 2x2 covariance matrix. Returned only if return_cov == True. Notes ----- This function works by computing a covariance matrix from the inputs, and calling ``np.random.multivariate_normal()``. If the covariance matrix is available, this function can be called directly. 
""" # compute covariance matrix sigma_xx = ((sigma_1 * np.cos(alpha)) ** 2 + (sigma_2 * np.sin(alpha)) ** 2) sigma_yy = ((sigma_1 * np.sin(alpha)) ** 2 + (sigma_2 * np.cos(alpha)) ** 2) sigma_xy = (sigma_1 ** 2 - sigma_2 ** 2) * np.sin(alpha) * np.cos(alpha) cov = np.array([[sigma_xx, sigma_xy], [sigma_xy, sigma_yy]]) # draw points from the distribution x = np.random.multivariate_normal(mu, cov, size) if return_cov: return x, cov else: return x #---------------------------------------------------------------------- # Define some new distributions based on rv_continuous class trunc_exp_gen(rv_continuous): """A truncated positive exponential continuous random variable. The probability distribution is:: p(x) ~ exp(k * x) between a and b = 0 otherwise The arguments are (a, b, k) %(before_notes)s %(example)s """ def _argcheck(self, a, b, k): self._const = k / (np.exp(k * b) - np.exp(k * a)) return (a != b) and not np.isinf(k) def _pdf(self, x, a, b, k): pdf = self._const * np.exp(k * x) pdf[(x < a) | (x > b)] = 0 return pdf def _rvs(self, a, b, k): y = np.random.random(self._size) return (1. / k) * np.log(1 + y * k / self._const) trunc_exp = trunc_exp_gen(name="trunc_exp", shapes='a, b, k') class linear_gen(rv_continuous): """A truncated positive exponential continuous random variable. The probability distribution is:: p(x) ~ c * x + d between a and b = 0 otherwise The arguments are (a, b, c). d is set by the normalization %(before_notes)s %(example)s """ def _argcheck(self, a, b, c): return (a != b) and not np.isinf(c) def _pdf(self, x, a, b, c): d = 1. / (b - a) - 0.5 * c * (b + a) pdf = c * x + d pdf[(x < a) | (x > b)] = 0 return pdf def _rvs(self, a, b, c): mu = 0.5 * (a + b) W = (b - a) x0 = 1. / c / W - mu r = np.random.random(self._size) return -x0 + np.sqrt(2. * r / c + a * a + 2. * a * x0 + x0 * x0) linear = linear_gen(name="linear", shapes='a, b, c')
nhuntwalker/astroML
astroML/stats/random.py
Python
bsd-2-clause
3,890
class Castxml < Formula desc "C-family Abstract Syntax Tree XML Output" homepage "https://github.com/CastXML/CastXML" url "https://mirrors.ocf.berkeley.edu/debian/pool/main/c/castxml/castxml_0.1+git20170823.orig.tar.xz" mirror "https://mirrorservice.org/sites/ftp.debian.org/debian/pool/main/c/castxml/castxml_0.1+git20170823.orig.tar.xz" version "0.1+git20170823" sha256 "aa10c17f703ef46a88f9772205d8f51285fd3567aa91931ee1a7a5abfff95b11" revision 2 head "https://github.com/CastXML/castxml.git" bottle do cellar :any sha256 "2631a24141657b845f4c474b0ce1baea3efbf6d56c3e7b8eabe8f4d48dc46102" => :high_sierra sha256 "77950bd5fd2d2f482fbe768048b99745fd95fda6ad67e5d2d13b2d8b91fb3b7c" => :sierra sha256 "fd750ed01cb0b5e4724d4ba8cd4da776eaab475cb0d2c724a01131659d13f464" => :el_capitan end depends_on "cmake" => :build depends_on "llvm@5" def install mkdir "build" do system "cmake", "..", *std_cmake_args system "make", "install" end end test do (testpath/"test.cpp").write <<~EOS int main() { return 0; } EOS system "#{bin}/castxml", "-c", "-x", "c++", "--castxml-cc-gnu", "clang++", "--castxml-gccxml", "-o", "test.xml", "test.cpp" end end
moderndeveloperllc/homebrew-core
Formula/castxml.rb
Ruby
bsd-2-clause
1,243
//$$CDS-header$$ #include "priority_queue/hdr_pqueue.h" #include <cds/container/fcpriority_queue.h> #include <deque> namespace priority_queue { void PQueueHdrTest::FCPQueue_deque() { typedef cds::container::FCPriorityQueue< PQueueHdrTest::value_type ,std::priority_queue< PQueueHdrTest::value_type ,std::deque<PQueueHdrTest::value_type> ,PQueueHdrTest::less > > pqueue_type; test_fcpqueue<pqueue_type>(); } void PQueueHdrTest::FCPQueue_deque_stat() { typedef cds::container::FCPriorityQueue< PQueueHdrTest::value_type ,std::priority_queue< PQueueHdrTest::value_type ,std::deque<PQueueHdrTest::value_type> ,PQueueHdrTest::less > ,cds::container::fcpqueue::make_traits< cds::opt::stat< cds::container::fcpqueue::stat<> > >::type > pqueue_type; test_fcpqueue<pqueue_type>(); } void PQueueHdrTest::FCPQueue_deque_mutex() { typedef cds::container::FCPriorityQueue< PQueueHdrTest::value_type ,std::priority_queue< PQueueHdrTest::value_type ,std::deque<PQueueHdrTest::value_type> > ,cds::container::fcpqueue::make_traits< cds::opt::lock_type< std::mutex > >::type > pqueue_type; test_fcpqueue<pqueue_type>(); } } // namespace priorty_queue
goswamia/libcds
tests/test-hdr/priority_queue/hdr_fcpqueue_deque.cpp
C++
bsd-2-clause
1,563
Run this [Elasticsearch][] image with: $ docker run -d --name elasticsearch-0 gentoobb/elasticsearch Then [link][linking] to it from your client container: $ docker run --link elasticsearch-0:elastic your-client For example, we can use the busybox image and wget to query the elasticsearch container: $ docker run --link elasticsearch-0:es -it --rm gentoobb/busybox /bin/sh $ wget --quiet -O - "http://es:9200/" { "status" : 200, "name" : "Puff Adder", "version" : { "number" : "1.0.1", "build_hash" : "5c03844e1978e5cc924dab2a423dc63ce881c42b", "build_timestamp" : "2014-02-25T15:52:53Z", "build_snapshot" : false, "lucene_version" : "4.6" }, "tagline" : "You Know, for Search" } [Elasticsearch]: http://www.elasticsearch.org/ [linking]: http://docs.docker.io/en/latest/use/port_redirection/#linking-a-container
guruvan/gentoo-bb
dock/guruvan/images/elasticsearch/README.md
Markdown
bsd-2-clause
913
class Genometools < Formula desc "GenomeTools: The versatile open source genome analysis software" homepage "http://genometools.org/" # doi "10.1109/TCBB.2013.68" # tag "bioinformatics" url "http://genometools.org/pub/genometools-1.5.8.tar.gz" sha256 "c1864ce7df3bac9699a50a46b005995f96ddd287f9469d8448815aba900706eb" head "https://github.com/genometools/genometools.git" bottle do cellar :any sha256 "94362afb9cc048a3b26a3ee8731c9fc75e15b74bf165c10eb9826acf45419738" => :el_capitan sha256 "4bc79c22e52962a18051aabaf9dcf86681847abbf3f8430ca79075bce0047dbf" => :yosemite sha256 "b432b847a18e0e76cf861b7fed9c814d0b214a6667fc9f40d719fec5985df3fb" => :mavericks sha256 "511e0711954994632f5539fc59b7deb09b221c980b3065d9923f240be8b00051" => :x86_64_linux end option :universal option "with-check", "Run tests which require approximately one hour to run" option "without-pangocairo", "Build without Pango/Cairo (disables AnnotationSketch tool)" option "with-hmmer", "Build with HMMER (to enable protein domain search functionality in the ltrdigest tool)" depends_on "pkg-config" => :build depends_on :python => :recommended unless OS.mac? && MacOS.version >= :lion if build.with? "pangocairo" depends_on "cairo" depends_on "pango" end def install args = ["prefix=#{prefix}"] args << "cairo=no" if build.without? "pangocairo" args << "with-hmmer=yes" if build.with? "hmmer" args << "universal=yes" if build.universal? args << "64bit=yes" if MacOS.prefer_64_bit? system "make", *args system "make", "test", *args if build.with? "check" system "make", "install", *args prefix.install bin/"gtdata" if build.with? "python" cd "gtpython" do inreplace "gt/dlload.py", "gtlib = CDLL(\"libgenometools\" + soext)", "gtlib = CDLL(\"#{lib}/libgenometools\" + soext)" system "python", *Language::Python.setup_install_args(prefix) system "python", "-m", "unittest", "discover", "tests" end end end test do system "#{bin}/gt", "-test" system "python", "-c", "from gt import *" if build.with? "python" end end
kozo2/homebrew-science
genometools.rb
Ruby
bsd-2-clause
2,154
<!doctype html> <html> <head> <title>VisLab: {{page_title}}</title> <link href="/static/main.css" rel="stylesheet" type="text/css" /> <link rel="stylesheet" href="/static/font-awesome/css/font-awesome.min.css" /> <script src="/static/sprintf-0.7-beta1.js"></script> <script src="/static/jquery-1.10.2.min.js"></script> <script src="/static/d3.v3.min.js"></script> {% if page_mode == 'results' %} <link href="/static/ava_results_barplot.css" rel="stylesheet" type="text/css" /> <script src="/static/ava_results_barplot.js"></script> {% endif %} {% if dataset_name == 'ava' %} <script src="/static/jquery.sparkline.min.js"></script> {% endif %} {% if dataset_name == 'ava' and page_mode == 'data' %} <link href="/static/ava_scatterplot.css" rel="stylesheet" type="text/css" /> <script src="/static/d3.hexbin.v0.min.js"></script> <script src="/static/ava_scatterplot.js"></script> {% endif %} <script src="/static/ava_explorer.js"></script> <script> $(document).ready(function() { // Set global variables. page_mode = "{{page_mode}}"; dataset_name = "{{dataset_name}}"; page = {{args.page}}; // Set selects to the passed-in arguments. {% for arg_name, val in args.iteritems() %} $("#{{arg_name}}").val("{{val}}"); {% endfor %} // Execute the program logic. 
ava_explorer(); }); </script> </head> <body> <div class="container"> <h1> <a href="/"><i class="icon-home"></i></a>&nbsp; VisLab: {{page_title}} </h1> <!-- {% if page_mode == 'results' %} <div id="results-barplot"></div> {% endif %} --> {% if page_mode == 'data' and dataset_name == 'ava' %} <div id="data-scatterplot"></div> <input type='hidden' class='query_arg' id='rating_mean_min' /> <input type='hidden' class='query_arg' id='rating_mean_max' /> <input type='hidden' class='query_arg' id='rating_std_min' /> <input type='hidden' class='query_arg' id='rating_std_max' /> {% endif %} <div id="search-options" style="display: block; text-align: center;"> {% for name, options in select_options.iteritems() %} <label for="{{name}}">{{name}}</label> <select id="{{name}}" class='query_arg'> {% for x in options %} <option value="{{ x }}">{{ x }}</option> {% endfor %} </select> {% endfor %} </div> <div id="images"> <div class="results-nav" style="display: none;"> <h3>Matching images</h3> <span id="num-results"></span> results, <a href="#" id="prev-page">&larr;</a> page <span id="page"></span> <a href="#" id="next-page">&rarr;</a> </p> <div id="results"> </div> </div> </div> </body> </html>
Jai-Chaudhary/vislab
vislab/templates/results.html
HTML
bsd-2-clause
2,758
/* * Copyright (c) 2018 Abex * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package net.runelite.client.plugins.kourendlibrary; import java.util.HashMap; import java.util.Map; import lombok.Getter; import net.runelite.api.NpcID; public enum LibraryCustomer { VILLIA(NpcID.VILLIA, "Villia"), PROFESSOR_GRACKLEBONE(NpcID.PROFESSOR_GRACKLEBONE, "Prof. 
Gracklebone"), Sam(NpcID.SAM_7049, "Sam"); @Getter private final int id; @Getter private final String name; private static final Map<Integer, LibraryCustomer> byId = buildIdMap(); LibraryCustomer(int id, String name) { this.id = id; this.name = name; } public static LibraryCustomer getById(int id) { return byId.get(id); } private static Map<Integer, LibraryCustomer> buildIdMap() { Map<Integer, LibraryCustomer> byId = new HashMap<>(); for (LibraryCustomer c : values()) { byId.put(c.id, c); } return byId; } }
UniquePassive/runelite
runelite-client/src/main/java/net/runelite/client/plugins/kourendlibrary/LibraryCustomer.java
Java
bsd-2-clause
2,190
// Copyright (c) 2012 Ecma International. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- es5id: 15.2.3.6-4-365 description: > ES5 Attributes - fail to update [[Enumerable]] attribute of data property ([[Writable]] is false, [[Enumerable]] is true, [[Configurable]] is false) to different value ---*/ var obj = {}; Object.defineProperty(obj, "prop", { value: 2010, writable: false, enumerable: true, configurable: false }); var propertyDefineCorrect = obj.hasOwnProperty("prop"); var desc1 = Object.getOwnPropertyDescriptor(obj, "prop"); assert.throws(TypeError, function() { Object.defineProperty(obj, "prop", { enumerable: false }); }); var desc2 = Object.getOwnPropertyDescriptor(obj, "prop"); assert(propertyDefineCorrect, 'propertyDefineCorrect !== true'); assert.sameValue(desc1.enumerable, true, 'desc1.enumerable'); assert.sameValue(obj.prop, 2010, 'obj.prop'); assert.sameValue(desc2.enumerable, true, 'desc2.enumerable');
sebastienros/jint
Jint.Tests.Test262/test/built-ins/Object/defineProperty/15.2.3.6-4-365.js
JavaScript
bsd-2-clause
1,015
package cz.metacentrum.perun.core.entry; import cz.metacentrum.perun.TestUtils.TestConsumer; import cz.metacentrum.perun.TestUtils.TestSupplier; import cz.metacentrum.perun.core.AbstractPerunIntegrationTest; import cz.metacentrum.perun.core.api.Attribute; import cz.metacentrum.perun.core.api.AttributeDefinition; import cz.metacentrum.perun.core.api.AttributesManager; import cz.metacentrum.perun.core.api.BeansUtils; import cz.metacentrum.perun.core.api.Candidate; import cz.metacentrum.perun.core.api.ExtSource; import cz.metacentrum.perun.core.api.ExtSourcesManager; import cz.metacentrum.perun.core.api.Facility; import cz.metacentrum.perun.core.api.Group; import cz.metacentrum.perun.core.api.Member; import cz.metacentrum.perun.core.api.MemberGroupStatus; import cz.metacentrum.perun.core.api.Owner; import cz.metacentrum.perun.core.api.OwnerType; import cz.metacentrum.perun.core.api.Paginated; import cz.metacentrum.perun.core.api.Resource; import cz.metacentrum.perun.core.api.RichUser; import cz.metacentrum.perun.core.api.RichUserExtSource; import cz.metacentrum.perun.core.api.Role; import cz.metacentrum.perun.core.api.Service; import cz.metacentrum.perun.core.api.SortingOrder; import cz.metacentrum.perun.core.api.SpecificUserType; import cz.metacentrum.perun.core.api.Sponsor; import cz.metacentrum.perun.core.api.Status; import cz.metacentrum.perun.core.api.User; import cz.metacentrum.perun.core.api.UserExtSource; import cz.metacentrum.perun.core.api.UsersManager; import cz.metacentrum.perun.core.api.UsersOrderColumn; import cz.metacentrum.perun.core.api.UsersPageQuery; import cz.metacentrum.perun.core.api.Vo; import cz.metacentrum.perun.core.api.exceptions.AnonymizationNotSupportedException; import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException; import cz.metacentrum.perun.core.api.exceptions.ExtSourceNotExistsException; import cz.metacentrum.perun.core.api.exceptions.InternalErrorException; import 
cz.metacentrum.perun.core.api.exceptions.MemberNotExistsException; import cz.metacentrum.perun.core.api.exceptions.RelationExistsException; import cz.metacentrum.perun.core.api.exceptions.RelationNotExistsException; import cz.metacentrum.perun.core.api.exceptions.UserExtSourceExistsException; import cz.metacentrum.perun.core.api.exceptions.UserExtSourceNotExistsException; import cz.metacentrum.perun.core.api.exceptions.UserNotExistsException; import cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl; import org.json.JSONObject; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.time.LocalDate; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Integration tests of UsersManager. 
* * @author Pavel Zlamal <256627@mail.muni.cz> */ public class UsersManagerEntryIntegrationTest extends AbstractPerunIntegrationTest { private final static String CLASS_NAME = "UsersManager."; private final static String ATTR_UES_O = "o"; private final static String ATTR_UES_CN = "cn"; private final static String URN_ATTR_USER_PREFERRED_MAIL = AttributesManager.NS_USER_ATTR_DEF + ":preferredMail"; private final static String URN_ATTR_UES_O = AttributesManager.NS_UES_ATTR_DEF + ':' + ATTR_UES_O; private final static String URN_ATTR_UES_CN = AttributesManager.NS_UES_ATTR_DEF + ':' + ATTR_UES_CN; private User user; // our User private User serviceUser1; private User serviceUser2; private User sponsoredUser; private Vo vo; String userFirstName = ""; String userLastName = ""; String extLogin = ""; // his login in external source String extLogin2 = ""; final String extSourceName = "UserManagerEntryIntegrationTest"; final ExtSource extSource = new ExtSource(0, "testExtSource", "cz.metacentrum.perun.core.impl.ExtSourceInternal"); final UserExtSource userExtSource = new UserExtSource(); // create new User Ext Source private UsersManager usersManager; @Before public void setUp() throws Exception { usersManager = perun.getUsersManager(); // set random name and logins during every setUp method userFirstName = Long.toHexString(Double.doubleToLongBits(Math.random())); userLastName = Long.toHexString(Double.doubleToLongBits(Math.random())); extLogin = Long.toHexString(Double.doubleToLongBits(Math.random())); // his login in external source extLogin2 = Long.toHexString(Double.doubleToLongBits(Math.random())); vo = setUpVo(); setUpUser(); setUpUserExtSource(); setUpSpecificUser1ForUser(vo); setUpSpecificUser2ForUser(vo); setUpSponsoredUserForVo(vo); } @Test public void createUser() throws Exception { System.out.println(CLASS_NAME + "createUser"); user = new User(); user.setFirstName(userFirstName); user.setMiddleName(""); user.setLastName(userLastName); user.setTitleBefore(""); 
user.setTitleAfter(""); assertNotNull(perun.getUsersManagerBl().createUser(sess, user)); // create new user in database assertTrue("user id must be greater than zero", user.getId() > 0); usersForDeletion.add(user); } @Test public void createUserSetsUUID() throws Exception { System.out.println(CLASS_NAME + "createUserSetsUUID"); user = new User(); user.setFirstName(userFirstName); user.setLastName(userLastName); User createdUser = perun.getUsersManagerBl().createUser(sess, user); assertThat(createdUser.getUuid()).isNotNull(); assertThat(createdUser.getUuid().version()).isEqualTo(4); } @Test public void getUserById() throws Exception { System.out.println(CLASS_NAME + "getUserById"); User secondUser = usersManager.getUserById(sess, user.getId()); assertNotNull(secondUser); assertEquals("both users should be the same",user,secondUser); assertThat(secondUser.getUuid()).isNotNull(); assertThat(secondUser.getUuid().version()).isEqualTo(4); } @Test (expected=UserNotExistsException.class) public void getUserByIdWhenUserNotExist() throws Exception { System.out.println(CLASS_NAME + "getUserByIdWhenUserNotExist"); usersManager.getUserById(sess, 0); // shouldn't find user } @Test public void getUsers() throws Exception { System.out.println(CLASS_NAME + "getUsers"); List<User> users = usersManager.getUsers(sess); assertNotNull(users); assertTrue(users.size() > 0); assertTrue(users.contains(user)); } @Test public void getSpecificUsers() throws Exception { System.out.println(CLASS_NAME + "getServiceUsers"); List<User> users = usersManager.getSpecificUsers(sess); assertTrue(users.contains(serviceUser1)); assertTrue(users.contains(serviceUser2)); } @Test public void setAndUnsetSpecificUser() throws Exception { System.out.println(CLASS_NAME + "setAndUnsetSpecificUser"); setUpUser(); User owner = user; assertTrue("User should be service user", serviceUser1.isServiceUser()); usersManager.unsetSpecificUser(sess, serviceUser1, SpecificUserType.SERVICE); User user2 = 
usersManager.getUserById(sess, serviceUser1.getId()); assertTrue("User shouldn't be service user", !user2.isServiceUser()); usersManager.setSpecificUser(sess, user2, SpecificUserType.SERVICE, owner); user2 = usersManager.getUserById(sess, user2.getId()); assertTrue("User should be service user again", user2.isServiceUser()); List<User> owners = usersManager.getUsersBySpecificUser(sess, user2); assertTrue("There should be just our owner", owners.size() == 1 && owners.contains(owner)); } @Test public void getUsersByServiceUser1() throws Exception { System.out.println(CLASS_NAME + "getUsersByServiceUser1"); List<User> users = usersManager.getUsersBySpecificUser(sess, serviceUser1); assertTrue(users.contains(user)); assertTrue(users.size() == 1); } @Test public void getUsersByServiceUser2() throws Exception { System.out.println(CLASS_NAME + "getUsersByServiceUser2"); List<User> users = usersManager.getUsersBySpecificUser(sess, serviceUser2); assertTrue(users.contains(user)); assertTrue(users.size() == 1); } @Test public void getSpecificUsersByUser() throws Exception { System.out.println(CLASS_NAME + "getServiceUsersByUser"); List<User> users = usersManager.getSpecificUsersByUser(sess, user); assertTrue(users.contains(serviceUser1)); assertTrue(users.contains(serviceUser2)); assertTrue(users.size() == 2); } @Test public void modifyOwnership() throws Exception { System.out.println(CLASS_NAME + "modifyOwnership"); usersManager.removeSpecificUserOwner(sess, user, serviceUser1); List<User> users = usersManager.getSpecificUsersByUser(sess, user); assertTrue(users.contains(serviceUser2)); assertTrue(users.size() == 1); usersManager.removeSpecificUserOwner(sess, user, serviceUser2); users = usersManager.getSpecificUsersByUser(sess, user); assertTrue(users.isEmpty()); usersManager.addSpecificUserOwner(sess, user, serviceUser1); users = usersManager.getSpecificUsersByUser(sess, user); assertTrue(users.contains(serviceUser1)); assertTrue(users.size() == 1); 
usersManager.addSpecificUserOwner(sess, user, serviceUser2); users = usersManager.getSpecificUsersByUser(sess, user); assertTrue(users.contains(serviceUser1)); assertTrue(users.contains(serviceUser2)); assertTrue(users.size() == 2); } @Test (expected= RelationNotExistsException.class) public void removeNotExistingOwnership() throws Exception { System.out.println(CLASS_NAME + "removeNotExistingOwnership"); Member member = setUpMember(vo); User userOfMember = perun.getUsersManagerBl().getUserByMember(sess, member); usersManager.removeSpecificUserOwner(sess, userOfMember, serviceUser1); } @Test (expected= RelationNotExistsException.class) public void removeOwnershipTwiceInRow() throws Exception { System.out.println(CLASS_NAME + "removeOwnershipTwiceInRow"); usersManager.removeSpecificUserOwner(sess, user, serviceUser1); usersManager.removeSpecificUserOwner(sess, user, serviceUser1); } @Test (expected= RelationExistsException.class) public void addExistingOwnership() throws Exception { System.out.println(CLASS_NAME + "addExistingOwnership"); usersManager.addSpecificUserOwner(sess, user, serviceUser1); } @Test (expected= RelationExistsException.class) public void addOwnershipTwiceInRow() throws Exception { System.out.println(CLASS_NAME + "addOwnershipTwiceInRow"); Member member = setUpMember(vo); User userOfMember = perun.getUsersManagerBl().getUserByMember(sess, member); usersManager.addSpecificUserOwner(sess, userOfMember, serviceUser1); usersManager.addSpecificUserOwner(sess, userOfMember, serviceUser1); } @Test public void disableExistingOwnership() throws Exception { System.out.println(CLASS_NAME + "disableExistingOwnership"); Member member = setUpMember(vo); User userOfMember = perun.getUsersManagerBl().getUserByMember(sess, member); assertTrue(!perun.getUsersManagerBl().specificUserOwnershipExists(sess, userOfMember, serviceUser1)); assertTrue(!perun.getUsersManagerBl().specificUserOwnershipExists(sess, userOfMember, serviceUser2)); 
usersManager.addSpecificUserOwner(sess, userOfMember, serviceUser1); assertTrue(perun.getUsersManagerBl().specificUserOwnershipExists(sess, userOfMember, serviceUser1)); usersManager.addSpecificUserOwner(sess, userOfMember, serviceUser2); assertTrue(perun.getUsersManagerBl().specificUserOwnershipExists(sess, userOfMember, serviceUser2)); List<User> specificUsers = usersManager.getSpecificUsersByUser(sess, user); assertTrue(specificUsers.contains(serviceUser1)); assertTrue(specificUsers.contains(serviceUser2)); assertTrue(specificUsers.size() == 2); usersManager.removeSpecificUserOwner(sess, user, serviceUser1); assertTrue(perun.getUsersManagerBl().specificUserOwnershipExists(sess, user, serviceUser1)); assertTrue(perun.getUsersManagerBl().specificUserOwnershipExists(sess, user, serviceUser2)); specificUsers = usersManager.getSpecificUsersByUser(sess, user); assertTrue(specificUsers.contains(serviceUser2)); assertTrue(specificUsers.size() == 1); usersManager.removeSpecificUserOwner(sess, user, serviceUser2); assertTrue(perun.getUsersManagerBl().specificUserOwnershipExists(sess, user, serviceUser1)); assertTrue(perun.getUsersManagerBl().specificUserOwnershipExists(sess, user, serviceUser2)); assertTrue(perun.getUsersManagerBl().specificUserOwnershipExists(sess, user, serviceUser2)); specificUsers = usersManager.getSpecificUsersByUser(sess, user); assertTrue(specificUsers.isEmpty()); } @Test public void updateUser() throws Exception { System.out.println(CLASS_NAME + "updateUser"); user.setFirstName(Long.toHexString(Double.doubleToLongBits(Math.random()))); user.setMiddleName(""); user.setLastName(Long.toHexString(Double.doubleToLongBits(Math.random()))); user.setTitleBefore(""); user.setTitleAfter(""); User updatedUser = usersManager.updateUser(sess, user); assertNotNull(updatedUser); assertEquals("users should be the same after update in DB",user,updatedUser); User gettingUser = usersManager.getUserById(sess, updatedUser.getId()); assertEquals("users should be the 
same after updated in DB and getting from DB",gettingUser,updatedUser); } @Test (expected=UserNotExistsException.class) public void updateWhenUserNotExists() throws Exception { System.out.println(CLASS_NAME + "updateWhenUserNotExists"); usersManager.updateUser(sess, new User()); } @Test public void updateUserWithNullValues() throws Exception { System.out.println(CLASS_NAME + "updateUserWithNullValues"); user.setFirstName(null); user.setLastName(Long.toHexString(Double.doubleToLongBits(Math.random()))); user.setMiddleName(null); user.setTitleBefore(null); user.setTitleAfter(null); User updatedUser = usersManager.updateUser(sess, user); User gettingUser = usersManager.getUserById(sess, updatedUser.getId()); assertNotNull(updatedUser); assertEquals("users should be the same after update in DB", gettingUser, updatedUser); } @Test (expected=cz.metacentrum.perun.core.api.exceptions.IllegalArgumentException.class) public void updateUserWithNullValueInLastName() throws Exception { System.out.println(CLASS_NAME + "updateUserWithNullValueInLastName"); user.setFirstName(null); user.setLastName(null); User updateUser = usersManager.updateUser(sess, user); } @Test (expected=UserNotExistsException.class) public void deleteUser() throws Exception { System.out.println(CLASS_NAME + "deleteUser"); usersManager.deleteUser(sess, user, true); // force delete usersManager.getUserById(sess, user.getId()); // should be unable to get deleted user by his id } @Test (expected=UserNotExistsException.class) public void deleteUserWhenUserNotExists() throws Exception { System.out.println(CLASS_NAME + "deleteUserWhenUserNotExists"); usersManager.deleteUser(sess, new User(), true); // force delete // shouldn't find user } @Test public void anonymizeUser() throws Exception { System.out.println(CLASS_NAME + "anonymizeUser"); List<String> originalAttributesToKeep = BeansUtils.getCoreConfig().getAttributesToKeep(); // configure attributesToKeep so it contains only 1 attribute - preferredMail 
BeansUtils.getCoreConfig().setAttributesToKeep(Collections.singletonList(AttributesManager.NS_USER_ATTR_DEF + ":preferredMail")); // set preferredMail and phone attributes Attribute preferredMail = perun.getAttributesManagerBl().getAttribute(sess, user, AttributesManager.NS_USER_ATTR_DEF + ":preferredMail"); preferredMail.setValue("mail@mail.com"); perun.getAttributesManagerBl().setAttribute(sess, user, preferredMail); Attribute phone = perun.getAttributesManagerBl().getAttribute(sess, user, AttributesManager.NS_USER_ATTR_DEF + ":phone"); phone.setValue("+420555555"); perun.getAttributesManagerBl().setAttribute(sess, user, phone); usersManager.anonymizeUser(sess, user); // set attributesToKeep back to the original attributes BeansUtils.getCoreConfig().setAttributesToKeep(originalAttributesToKeep); User updatedUser = perun.getUsersManagerBl().getUserById(sess, user.getId()); assertTrue("Firstname should be null or empty.", updatedUser.getFirstName() == null || updatedUser.getFirstName().isEmpty()); assertTrue("Lastname should be null or empty.", updatedUser.getLastName() == null || updatedUser.getLastName().isEmpty()); Attribute updatedPreferredMail = perun.getAttributesManagerBl().getAttribute(sess, user, AttributesManager.NS_USER_ATTR_DEF + ":preferredMail"); Attribute updatedPhone = perun.getAttributesManagerBl().getAttribute(sess, user, AttributesManager.NS_USER_ATTR_DEF + ":phone"); assertEquals("PreferredMail attribute should be kept untouched.", updatedPreferredMail, preferredMail); assertNull("Phone attribute should be deleted.", updatedPhone.getValue()); } @Test(expected=UserNotExistsException.class) public void anonymizeUserWhenUserNotExists() throws Exception { System.out.println(CLASS_NAME + "anonymizeUserWhenUserNotExists"); usersManager.anonymizeUser(sess, new User()); // shouldn't find user } @Test public void anonymizeUserWhenAnonymizationNotSupported() throws Exception { System.out.println(CLASS_NAME + "anonymizeUserWhenAnonymizationNotSupported"); 
List<String> originalAttributesToAnonymize = BeansUtils.getCoreConfig().getAttributesToAnonymize(); // configure attributesToAnonymize so it contains only 1 attribute - dummy-test BeansUtils.getCoreConfig().setAttributesToAnonymize(Collections.singletonList(AttributesManager.NS_USER_ATTR_DEF + ":dummy-test")); // create dummy attribute Attribute attrLogin = new Attribute(); attrLogin.setNamespace(AttributesManager.NS_USER_ATTR_DEF); attrLogin.setFriendlyName("dummy-test"); attrLogin.setType(String.class.getName()); perun.getAttributesManager().createAttribute(sess, attrLogin); // set dummy attribute Attribute dummy = perun.getAttributesManagerBl().getAttribute(sess, user, AttributesManager.NS_USER_ATTR_DEF + ":dummy-test"); dummy.setValue("dummy"); perun.getAttributesManagerBl().setAttribute(sess, user, dummy); try { usersManager.anonymizeUser(sess, user); // anonymizeUser() should have thrown AnonymizationNotSupportedException fail(); } catch (AnonymizationNotSupportedException ex) { // this is expected } finally { // set attributesToAnonymize back to the original attributes BeansUtils.getCoreConfig().setAttributesToAnonymize(originalAttributesToAnonymize); } } @Test public void addIDPExtSourcesWithSameLogin() throws Exception { System.out.println(CLASS_NAME + "addIDPExtSourcesWithSameLogin"); ExtSource ext1 = new ExtSource("test1", ExtSourcesManagerEntry.EXTSOURCE_IDP); ExtSource ext2 = new ExtSource("test2", ExtSourcesManagerEntry.EXTSOURCE_IDP); ext1 = perun.getExtSourcesManagerBl().createExtSource(sess, ext1, null); ext2 = perun.getExtSourcesManagerBl().createExtSource(sess, ext2, null); UserExtSource ues1 = new UserExtSource(ext1, 1, "testExtLogin@test"); UserExtSource ues2 = new UserExtSource(ext2, 1, "testExtLogin@test"); // should be allowed since user is the same usersManager.addUserExtSource(sess, user, ues1); usersManager.addUserExtSource(sess, user, ues2); } @Test (expected=InternalErrorException.class) public void 
addIDPExtSourcesWithSameLoginDifferentUser() throws Exception { System.out.println(CLASS_NAME + "addIDPExtSourcesWithSameLoginDifferentUser"); ExtSource ext1 = new ExtSource("test1", ExtSourcesManagerEntry.EXTSOURCE_IDP); ExtSource ext2 = new ExtSource("test2", ExtSourcesManagerEntry.EXTSOURCE_IDP); ext1 = perun.getExtSourcesManagerBl().createExtSource(sess, ext1, null); ext2 = perun.getExtSourcesManagerBl().createExtSource(sess, ext2, null); UserExtSource ues1 = new UserExtSource(ext1, 1, "testExtLogin@test"); UserExtSource ues2 = new UserExtSource(ext2, 1, "testExtLogin@test"); // should fail since there are different users usersManager.addUserExtSource(sess, user, ues1); usersManager.addUserExtSource(sess, sponsoredUser, ues2); } @Test public void addIDPExtSourcesWithSameLoginDifferentUserDuplicates() throws Exception { System.out.println(CLASS_NAME + "addIDPExtSourcesWithSameLoginDifferentUserDuplicates"); ExtSource ext1 = new ExtSource("test1", ExtSourcesManagerEntry.EXTSOURCE_IDP); ExtSource ext2 = new ExtSource("test2", ExtSourcesManagerEntry.EXTSOURCE_IDP); ExtSource ext3 = new ExtSource("test3", ExtSourcesManagerEntry.EXTSOURCE_IDP); ext1 = perun.getExtSourcesManagerBl().createExtSource(sess, ext1, null); ext2 = perun.getExtSourcesManagerBl().createExtSource(sess, ext2, null); ext3 = perun.getExtSourcesManagerBl().createExtSource(sess, ext3, null); UserExtSource ues1 = new UserExtSource(ext1, 1, "testExtLogin@test"); UserExtSource ues2 = new UserExtSource(ext2, 1, "testExtLogin@test"); UserExtSource ues3 = new UserExtSource(ext3, 1, "testExtLogin@test"); usersManager.addUserExtSource(sess, user, ues1); usersManager.addUserExtSource(sess, user, ues2); // should fail since there is different user using these identities (multiple times) assertThatExceptionOfType(InternalErrorException.class) .isThrownBy(() -> usersManager.addUserExtSource(sess, sponsoredUser, ues3)); } @Test public void addUserExtSource() throws Exception { System.out.println(CLASS_NAME + 
"addUserExtSource"); ExtSource externalSource = perun.getExtSourcesManager().getExtSourceByName(sess, extSourceName); UserExtSource userExtSource2 = new UserExtSource(); userExtSource2.setLogin(extLogin2); userExtSource2.setExtSource(externalSource); UserExtSource returnedUserExtSource = usersManager.addUserExtSource(sess, user, userExtSource2); assertNotNull(returnedUserExtSource); assertTrue(returnedUserExtSource.getId() > 0); assertEquals("Both User Ext Sources should be the same",userExtSource2, returnedUserExtSource); } @Test (expected=UserExtSourceExistsException.class) public void addUserExtSourceWhenUserExtSourceAlreadyExists() throws Exception { System.out.println(CLASS_NAME + "addUserExtSourceWhenUserExtSourceAlreadyExists"); usersManager.addUserExtSource(sess, user, userExtSource); } @Test (expected=UserNotExistsException.class) public void addUserExtSourceWhenUserNotExists() throws Exception { System.out.println(CLASS_NAME + "addUserExtSourceWhenUserNotExists"); usersManager.addUserExtSource(sess, new User(), userExtSource); // shouldn't find user } @Test public void updateUserExtSource() throws Exception { System.out.println(CLASS_NAME + "updateUserExtSource"); ExtSource ext1 = new ExtSource("test1", ExtSourcesManagerEntry.EXTSOURCE_IDP); ext1 = perun.getExtSourcesManagerBl().createExtSource(sess, ext1, null); UserExtSource ues1 = new UserExtSource(ext1, 1, "testExtLogin@test"); ues1 = usersManager.addUserExtSource(sess, user, ues1); ues1.setLoa(2); usersManager.updateUserExtSource(sess, ues1); UserExtSource retrievedUes = usersManager.getUserExtSourceById(sess, ues1.getId()); Assert.assertTrue("LoA was not updated", retrievedUes.getLoa() == ues1.getLoa()); ues1.setLogin("changedTestExtLogin@test"); usersManager.updateUserExtSource(sess, ues1); retrievedUes = usersManager.getUserExtSourceById(sess, ues1.getId()); Assert.assertTrue("Login was not updated", Objects.equals(retrievedUes.getLogin(),ues1.getLogin())); } @Test (expected = 
UserExtSourceExistsException.class) public void updateUserExtSourceWhenExists() throws Exception { System.out.println(CLASS_NAME + "updateUserExtSourceWhenExists"); ExtSource ext1 = new ExtSource("test1", ExtSourcesManagerEntry.EXTSOURCE_IDP); ext1 = perun.getExtSourcesManagerBl().createExtSource(sess, ext1, null); UserExtSource ues1 = new UserExtSource(ext1, 1, "testExtLogin@test"); usersManager.addUserExtSource(sess, user, ues1); UserExtSource ues2 = new UserExtSource(ext1, 1, "testExtLogin2@test"); ues2 = usersManager.addUserExtSource(sess, user, ues2); ues2.setLogin("testExtLogin@test"); usersManager.updateUserExtSource(sess, ues2); } @Test public void getUserByUserExtSource() throws Exception { System.out.println(CLASS_NAME + "getUserByUserExtSource"); User secondUser = usersManager.getUserByUserExtSource(sess, userExtSource); assertEquals("users should be the same from both ext sources",user, secondUser); } @Test public void getUserByExtSourceNameAndExtLogin() throws Exception { System.out.println(CLASS_NAME + "getUserByExtSourceNameAndExtLogin"); String extSourceName = userExtSource.getExtSource().getName(); String extLogin = userExtSource.getLogin(); User secondUser = usersManager.getUserByExtSourceNameAndExtLogin(sess, extSourceName, extLogin); assertEquals("users should be the same from both ext sources",user, secondUser); } @Test public void getActiveUserExtSources() throws Exception { System.out.println(CLASS_NAME + "getActiveUserExtSources"); ExtSource externalSource = perun.getExtSourcesManager().getExtSourceByName(sess, extSourceName); UserExtSource userExtSource = usersManager.getUserExtSourceByExtLogin(sess, externalSource, extLogin); List<UserExtSource> ues = perun.getUsersManagerBl().getActiveUserExtSources(sess, user); assertTrue(ues.contains(userExtSource)); } @Test public void getActiveUserExtSourcesIfEmpty() throws Exception { System.out.println(CLASS_NAME + "getActiveUserExtSources"); User emptyUser = setUpEmptyUser(); List<UserExtSource> 
ues = perun.getUsersManagerBl().getUserExtSources(sess, emptyUser); for(UserExtSource uExtSource: ues) { perun.getUsersManagerBl().removeUserExtSource(sess, emptyUser, uExtSource); } ues = perun.getUsersManagerBl().getActiveUserExtSources(sess, emptyUser); assertTrue(ues.isEmpty()); } @Test public void getUserExtSourceByExtLogin() throws Exception { System.out.println(CLASS_NAME + "getUserExtSourceByExtLogin"); ExtSource externalSource = perun.getExtSourcesManager().getExtSourceByName(sess, extSourceName); UserExtSource returnedUserExtSource = usersManager.getUserExtSourceByExtLogin(sess, externalSource, extLogin); assertEquals("both ext source should be the same", userExtSource, returnedUserExtSource); // check if both user ext sources are the same. } //TODO: for this test is needed to add creating login in registrar database /* @Test (expected=AlreadyReservedLoginException.class) public void isAlreadyReservedLogin() throws Exception { System.out.println(CLASS_NAME + "isAlreadyReservedLogin"); String namespace = "einfra"; String login = "martin_svehla"; perun.getUsersManagerBl().checkReservedLogins(sess, namespace, login); } */ @Test (expected=UserExtSourceNotExistsException.class) public void getUserExtSourceByExtLoginWhenExtLoginNotExists() throws Exception { System.out.println(CLASS_NAME + "getUserExtSourceByExtLoginWhenExtLoginNotExists"); ExtSource externalSource = perun.getExtSourcesManager().getExtSourceByName(sess, extSourceName); usersManager.getUserExtSourceByExtLogin(sess, externalSource, ""); // shouldn't find UserExtSource (based on valid ext source and invalid login) } @Test (expected=ExtSourceNotExistsException.class) public void getUserExtSourceByExtLoginWhenExtSourceNotExists() throws Exception { System.out.println(CLASS_NAME + "getUserExtSourceByExtLoginWhenExtSourceNotExists"); usersManager.getUserExtSourceByExtLogin(sess, new ExtSource(), ""); } @Test public void getUserExtSources() throws Exception { System.out.println(CLASS_NAME + 
"getUserExtSources"); List<UserExtSource> userExtSources = usersManager.getUserExtSources(sess, user); assertNotNull(userExtSources); assertTrue(userExtSources.size() == 2); // our user should have only two ext source, one we we added and the default one } @Test (expected=UserNotExistsException.class) public void getUserExtSourcesWhenUserNotExists() throws Exception { System.out.println(CLASS_NAME + "getUserExtSourcesWhenUserNotExists"); usersManager.getUserExtSources(sess, new User()); // shouldn't find user } @Test public void getUserExtSourceById() throws Exception { System.out.println(CLASS_NAME + "getUserExtSourceById"); int id = userExtSource.getId(); UserExtSource retUserExtSource = usersManager.getUserExtSourceById(sess, id); // get user ext source base on our user ext source ID assertNotNull("unable to get ext source by its ID", retUserExtSource); assertEquals("both user ext sources should be the same", userExtSource, retUserExtSource); } @Test (expected=UserExtSourceNotExistsException.class) public void getUserExtSourceByIdWhenExtSourceNotExists() throws Exception { System.out.println(CLASS_NAME + "getUserExtSourceByIdWhenExtSourceNotExists"); usersManager.getUserExtSourceById(sess, 0); // shouldn't find ext source } @Test public void getUserExtSourcesByIds() throws Exception { System.out.println(CLASS_NAME + "getUserExtSourcesByIds"); List<UserExtSource> userExtSources = usersManager.getUserExtSourcesByIds(sess, Collections.singletonList(userExtSource.getId())); assertEquals(userExtSources.size(), 1); assertTrue(userExtSources.contains(userExtSource)); // create another ues ExtSource externalSource = perun.getExtSourcesManager().getExtSourceByName(sess, extSourceName); UserExtSource anotherUes = perun.getUsersManager().addUserExtSource(sess, user, new UserExtSource(externalSource, extLogin2)); assertNotNull(anotherUes); userExtSources = usersManager.getUserExtSourcesByIds(sess, Arrays.asList(userExtSource.getId(), anotherUes.getId())); 
// Tail of the preceding getUserExtSourcesByIds test (method opened before this chunk):
// filtering by a single id must return exactly that userExtSource.
assertEquals(userExtSources.size(), 2);
assertTrue(userExtSources.contains(userExtSource));
assertTrue(userExtSources.contains(anotherUes));

userExtSources = usersManager.getUserExtSourcesByIds(sess, Collections.singletonList(anotherUes.getId()));
assertEquals(userExtSources.size(), 1);
assertTrue(userExtSources.contains(anotherUes));
}

// Every element of a unique list attribute must resolve back to the owning UserExtSource.
@Test
public void getUserExtSourceByListValue() throws Exception {
	System.out.println(CLASS_NAME + "getUserExtSourceByListValue");

	List<String> listValue = new ArrayList<>();
	listValue.add("A-VALUE");
	listValue.add("B-VALUE");
	listValue.add("C-VALUE");

	// last argument 'true' marks the attribute as unique — required by the lookup below
	Attribute attribute = createUserExtSourceAttribute("testAttribute", ArrayList.class.getName(), listValue, true);
	perun.getAttributesManagerBl().setAttribute(sess, userExtSource, attribute);

	for(String value : listValue) {
		UserExtSource returnedUserExtSource = perun.getUsersManagerBl().getUserExtSourceByUniqueAttributeValue(sess, attribute.getId(), value);
		assertEquals(userExtSource, returnedUserExtSource);
	}
}

// Map attributes are looked up by their "key=value" serialized form.
@Test
public void getUserExtSourceByMapValue() throws Exception {
	System.out.println(CLASS_NAME + "getUserExtSourceByMapValue");

	Map<String, String> mapValue = new LinkedHashMap<>();
	mapValue.put("A-KEY", "A-VALUE");
	mapValue.put("B-KEY", "B-VALUE");
	mapValue.put("C-KEY", "C-VALUE");

	Attribute attribute = createUserExtSourceAttribute("testAttribute", LinkedHashMap.class.getName(), mapValue, true);
	perun.getAttributesManagerBl().setAttribute(sess, userExtSource, attribute);

	for(String key : mapValue.keySet()) {
		String uniqueValue = key + "=" + mapValue.get(key);
		UserExtSource returnedUserExtSource = perun.getUsersManagerBl().getUserExtSourceByUniqueAttributeValue(sess, attribute.getId(), uniqueValue);
		assertEquals(userExtSource, returnedUserExtSource);
	}
}

@Test
public void getUserExtSourceByStringValue() throws Exception {
	System.out.println(CLASS_NAME + "getUserExtSourceByStringValue");

	Attribute attribute = createUserExtSourceAttribute("testAttribute", String.class.getName(),
		"testValue", true);
	perun.getAttributesManagerBl().setAttribute(sess, userExtSource, attribute);

	UserExtSource returnedUserExtSource = perun.getUsersManagerBl().getUserExtSourceByUniqueAttributeValue(sess, attribute.getId(), attribute.valueAsString());
	assertEquals(userExtSource, returnedUserExtSource);
}

// Integer values are searched by their decimal string form.
@Test
public void getUserExtSourceByIntegerValue() throws Exception {
	System.out.println(CLASS_NAME + "getUserExtSourceByIntegerValue");

	Attribute attribute = createUserExtSourceAttribute("testAttribute", Integer.class.getName(), 77, true);
	perun.getAttributesManagerBl().setAttribute(sess, userExtSource, attribute);

	UserExtSource returnedUserExtSource = perun.getUsersManagerBl().getUserExtSourceByUniqueAttributeValue(sess, attribute.getId(), attribute.valueAsInteger().toString());
	assertEquals(userExtSource, returnedUserExtSource);
}

@Test
public void getUserExtSourceByBooleanValue() throws Exception {
	System.out.println(CLASS_NAME + "getUserExtSourceByBooleanValue");

	Attribute attribute = createUserExtSourceAttribute("testAttribute", Boolean.class.getName(), true, true);
	perun.getAttributesManagerBl().setAttribute(sess, userExtSource, attribute);

	UserExtSource returnedUserExtSource = perun.getUsersManagerBl().getUserExtSourceByUniqueAttributeValue(sess, attribute.getId(), attribute.valueAsBoolean().toString());
	assertEquals(userExtSource, returnedUserExtSource);
}

// Looking up by a NON-unique attribute is a programming error and must fail fast.
@Test (expected=InternalErrorException.class)
public void getUserExtSourceByNonUniqueAttribute() throws Exception {
	System.out.println(CLASS_NAME + "getUserExtSourceByNonUniqueAttribute");

	Attribute attribute = createUserExtSourceAttribute("testAttribute");
	perun.getAttributesManagerBl().setAttribute(sess, userExtSource, attribute);

	perun.getUsersManagerBl().getUserExtSourceByUniqueAttributeValue(sess, attribute.getId(), attribute.valueAsString());
}

@Test (expected=UserExtSourceNotExistsException.class)
public void removeUserExtSource() throws Exception {
	System.out.println(CLASS_NAME +
"removeUserExtSource"); usersManager.removeUserExtSource(sess, user, userExtSource); usersManager.getUserExtSourceById(sess, userExtSource.getId()); // shloudn't get removed user ext source from DB } @Test (expected=UserExtSourceNotExistsException.class) public void removeUserExtSourceWithAttribute() throws Exception { System.out.println(CLASS_NAME + "removeUserExtSourceWithAttribute"); //Attribute 1 String name = "testingUEAttribute1"; Attribute userExtSourceAttribute1 = this.createUserExtSourceAttribute(name); userExtSourceAttribute1.setValue(name); perun.getAttributesManagerBl().setAttribute(sess, userExtSource, userExtSourceAttribute1); //Attribute 1 name = "testingUEAttribute2"; Attribute userExtSourceAttribute2 = this.createUserExtSourceAttribute(name); userExtSourceAttribute2.setValue(name); perun.getAttributesManagerBl().setAttribute(sess, userExtSource, userExtSourceAttribute2); usersManager.removeUserExtSource(sess, user, userExtSource); usersManager.getUserExtSourceById(sess, userExtSource.getId()); // shloudn't get removed user ext source from DB } @Test public void moveUserExtSource() throws Exception { System.out.println(CLASS_NAME + "removeUserExtSourceWithAttribute"); //TargetUser User targetUser = setUpEmptyUser(); usersManager.moveUserExtSource(sess, user, targetUser, userExtSource); UserExtSource returnedUserExtSource = usersManager.getUserExtSourceById(sess, userExtSource.getId()); assertEquals("returned user extSource should be assigned to the targetUser", targetUser.getId(), returnedUserExtSource.getUserId()); } @Test public void moveUserExtSourceWithAttribute() throws Exception { System.out.println(CLASS_NAME + "removeUserExtSourceWithAttribute"); //Attribute 1 String name = "testingUEAttribute1"; Attribute userExtSourceAttribute1 = this.createUserExtSourceAttribute(name); userExtSourceAttribute1.setValue(name); perun.getAttributesManagerBl().setAttribute(sess, userExtSource, userExtSourceAttribute1); //Attribute 1 name = 
"testingUEAttribute2";
	Attribute userExtSourceAttribute2 = this.createUserExtSourceAttribute(name);
	userExtSourceAttribute2.setValue(name);
	perun.getAttributesManagerBl().setAttribute(sess, userExtSource, userExtSourceAttribute2);

	//TargetUser
	User targetUser = setUpEmptyUser();

	usersManager.moveUserExtSource(sess, user, targetUser, userExtSource);

	UserExtSource returnedUserExtSource = usersManager.getUserExtSourceById(sess, userExtSource.getId());
	assertEquals("returned user extSource should be assigned to the targetUser", targetUser.getId(), returnedUserExtSource.getUserId());
}

@Test (expected=UserNotExistsException.class)
public void removeUserExtSourceWhenUserNotExist() throws Exception {
	System.out.println(CLASS_NAME + "removeUserExtSourceWhenUserNotExist");

	usersManager.removeUserExtSource(sess, new User(), userExtSource);
	// shouldn't find user
}

// Persistent ext sources (e.g. PERUN) must never be removable from a user.
@Test (expected=InternalErrorException.class)
public void removeUserExtSourcePersistent() throws Exception {
	System.out.println(CLASS_NAME + "removeUserExtSourcePersistent");

	// Assuming ExtSource PERUN is persistent (set as property)
	ExtSource extSource = perun.getExtSourcesManagerBl().getExtSourceByName(sess, "PERUN");
	List<UserExtSource> userExtSources = usersManager.getUserExtSources(sess, user);
	for (UserExtSource ues : userExtSources) {
		if (ues.getExtSource().equals(extSource)) {
			// expected to throw — persistent sources cannot be removed
			usersManager.removeUserExtSource(sess, user, ues);
			break;
		}
	}
}

@Test
public void getUserByMember() throws Exception {
	System.out.println(CLASS_NAME + "getUserByMember");

	Member member = setUpMember(vo);

	User firstUser = usersManager.getUserByMember(sess, member);
	assertNotNull("unable to get user by member from DB", firstUser);

	// fetching by id must yield the same user object
	User secondUser = usersManager.getUserById(sess,firstUser.getId());
	assertEquals("both users should be the same", firstUser, secondUser);
}

@Test (expected=MemberNotExistsException.class)
public void getUserByMemberWhenMemberNotExist() throws Exception {
	System.out.println(CLASS_NAME +
"getUserByMemberWhenMemberNotExist"); usersManager.getUserByMember(sess, new Member()); // shouldn't find member } @ Test public void getVosWhereUserIsAdmin() throws Exception { System.out.println(CLASS_NAME + "getVosWhereUserIsAdmin"); Member member = setUpMember(vo); User user = perun.getUsersManagerBl().getUserByMember(sess, member); perun.getVosManager().addAdmin(sess, vo, user); List<Vo> vos = usersManager.getVosWhereUserIsAdmin(sess, user); assertTrue("our user should be admin in one VO", vos.size() >= 1); } @ Test public void getVosWhereUserIsNotAdminButHisGroupIs() throws Exception { System.out.println(CLASS_NAME + "getVosWhereUserIsNotAdminButHisGroupIs"); Member member = setUpMember(vo); User user = perun.getUsersManagerBl().getUserByMember(sess, member); Group group = setUpGroup(vo, member); perun.getVosManager().addAdmin(sess, vo, group); List<Vo> vos = usersManager.getVosWhereUserIsAdmin(sess, user); assertTrue("our user should be admin in one VO", vos.size() >= 1); } @Test (expected=UserNotExistsException.class) public void getVosWhereUserIsAdminWhenUserNotExist() throws Exception { System.out.println(CLASS_NAME + "getVosWhereUserIsAdminWhenUserNotExist"); usersManager.getVosWhereUserIsAdmin(sess, new User()); // shouldn't find user } @ Test public void getGroupsWhereUserIsAdmin() throws Exception { System.out.println(CLASS_NAME + "getGroupsWhereUserIsAdmin"); Member member = setUpMember(vo); User returnedUser = usersManager.getUserByMember(sess, member); Group group1 = setUpGroup(vo, member, "testGroup1"); Group group2 = setUpGroup(vo, member, "testGroup2"); Group group3 = setUpGroup(vo, member, "testGroup3"); perun.getGroupsManager().removeAdmin(sess, group3, returnedUser); perun.getGroupsManager().addAdmin(sess, group3, group2); Group group4 = setUpGroup(vo, member, "testGroup4"); perun.getGroupsManager().removeAdmin(sess, group4, returnedUser); Vo vo2 = new Vo(0, "voForTest2", "voForTest2"); vo2 = perun.getVosManagerBl().createVo(sess, vo2); 
Member member2 = setUpMember(vo2); Group group5 = setUpGroup(vo2, member2, "testGroup5"); List<Group> groups = usersManager.getGroupsWhereUserIsAdmin(sess, returnedUser); assertTrue("our user should be admin at least in 4 groups", groups.size() >= 4); assertTrue("created group1 should be between returned groups and it is not", groups.contains(group1)); assertTrue("created group2 should be between returned groups and it is not", groups.contains(group2)); assertTrue("created group3 should be between returned groups and it is not", groups.contains(group3)); assertTrue("created group5 should be between returned groups and it is not", groups.contains(group5)); assertTrue("created group4 should not be between returned groups and it is", !groups.contains(group4)); } @ Test public void getGroupsWhereUserIsAdminWithSelectedVo() throws Exception { System.out.println(CLASS_NAME + "getGroupsWhereUserIsAdminWithSelectedVo"); Member member = setUpMember(vo); User returnedUser = usersManager.getUserByMember(sess, member); Group group1 = setUpGroup(vo, member, "testGroup1"); Group group2 = setUpGroup(vo, member, "testGroup2"); Group group3 = setUpGroup(vo, member, "testGroup3"); perun.getGroupsManager().removeAdmin(sess, group3, returnedUser); perun.getGroupsManager().addAdmin(sess, group3, group2); Group group4 = setUpGroup(vo, member, "testGroup4"); perun.getGroupsManager().removeAdmin(sess, group4, returnedUser); Vo vo2 = new Vo(0, "voForTest2", "voForTest2"); vo2 = perun.getVosManagerBl().createVo(sess, vo2); Member member2 = setUpMember(vo2); Group group5 = setUpGroup(vo2, member2, "testGroup5"); List<Group> groups = usersManager.getGroupsWhereUserIsAdmin(sess, vo, returnedUser); assertTrue("our user should be admin at least in 4 groups", groups.size() >= 3); assertTrue("created group1 should be between returned groups and it is not", groups.contains(group1)); assertTrue("created group2 should be between returned groups and it is not", groups.contains(group2)); 
assertTrue("created group3 should be between returned groups and it is not", groups.contains(group3)); assertTrue("created group5 should not be between returned groups and it is", !groups.contains(group5)); assertTrue("created group4 should not be between returned groups and it is", !groups.contains(group4)); } @Test (expected=UserNotExistsException.class) public void getGroupsWhereUserIsAdminWhenUserNotExist() throws Exception { System.out.println(CLASS_NAME + "getGroupsWhereUserIsAdminWhenUserNotExist"); usersManager.getGroupsWhereUserIsAdmin(sess, new User()); // shouldn't find user } @ Test public void getVosWhereUserIsMember() throws Exception { System.out.println(CLASS_NAME + "getVosWhereUserIsMember"); Member member = setUpMember(vo); User returnedUser = usersManager.getUserByMember(sess, member); List<Vo> vos = usersManager.getVosWhereUserIsMember(sess, returnedUser); assertTrue("our user should be member of one VO", vos.size() >= 1); } @Test (expected=UserNotExistsException.class) public void getVosWhereUserIsMemberWhenUserNotExist() throws Exception { System.out.println(CLASS_NAME + "getVosWhereUserIsMemberWhenUserNotExist"); usersManager.getVosWhereUserIsMember(sess, new User()); // shouldn't find user } @Test public void getAllowedResources() throws Exception { System.out.println(CLASS_NAME + "getAllowedResources"); Member member = setUpMember(vo); Group group = setUpGroup(vo, member); Facility facility = new Facility(); facility.setName("UsersManagerTestFacility"); facility = perun.getFacilitiesManager().createFacility(sess, facility); Owner owner = new Owner(); owner.setName("UsersManagerTestOwner"); owner.setContact("testingOwner"); owner.setType(OwnerType.technical); perun.getOwnersManager().createOwner(sess, owner); perun.getFacilitiesManager().addOwner(sess, facility, owner); Resource resource = new Resource(); resource.setName("UsersManagerTestResource"); resource.setDescription("Testovaci"); resource = 
perun.getResourcesManager().createResource(sess, resource, vo, facility);
	perun.getResourcesManager().assignGroupToResource(sess, group, resource, false, false, false);
	// create resource, assign group with our member

	User user = usersManager.getUserByMember(sess, member);
	// get user from member with assigned resource

	List<Resource> resources = usersManager.getAllowedResources(sess, facility, user);
	assertTrue("our user should have allowed resource", resources.size() >= 1);
	assertTrue("created resource should be allowed",resources.contains(resource));
}

@Test
public void getAssociatedResources() throws Exception {
	System.out.println(CLASS_NAME + "getAssociatedResources");

	Member member = setUpMember(vo);
	User user = usersManager.getUserByMember(sess, member);
	Group group = setUpGroup(vo, member);
	Facility facility = setUpFacility();
	Resource resource = setUpResource(facility, vo);

	// assignment flags: async=false, assignInactive=true, autoAssign=false
	// NOTE(review): flag meanings assumed from ResourcesManager signature — confirm against API.
	perun.getResourcesManager().assignGroupToResource(sess, group, resource, false, true, false);

	List<Resource> resources = perun.getUsersManagerBl().getAssociatedResources(sess, user);
	assertThat(resources).containsExactly(resource);
}

@Test
public void getAssociatedResourcesForFacility() throws Exception {
	System.out.println(CLASS_NAME + "getAssociatedResourcesForFacility");

	Member member = setUpMember(vo);
	User user = usersManager.getUserByMember(sess, member);
	Group group = setUpGroup(vo, member);
	Facility facility = setUpFacility();
	Resource resource = setUpResource(facility, vo);

	perun.getResourcesManager().assignGroupToResource(sess, group, resource, false, true, false);

	List<Resource> resources = perun.getUsersManagerBl().getAssociatedResources(sess, facility, user);
	assertThat(resources).containsExactly(resource);
}

@Test
public void findUsers() throws Exception {
	System.out.println(CLASS_NAME + "findUsers");

	// Create second user
	User user2 = new User();
	// Different first name from the default user in the test, contains a space
	user2.setFirstName(new
		StringBuilder(userFirstName).append('2').insert(userFirstName.length() / 2, ' ').toString());
	user2.setMiddleName("");
	user2.setLastName(userLastName);
	user2.setTitleBefore("");
	user2.setTitleAfter("");

	assertNotNull(perun.getUsersManagerBl().createUser(sess, user2));
	// create new user in database
	usersForDeletion.add(user2);
	// save user for deletion after testing

	List<User> users = usersManager.findUsers(sess, userFirstName + " " + userLastName);
	// This search must contain at least one result
	assertTrue("results must contain at least one user", users.size() >= 1);
	// And must contain the user
	assertTrue("results must contain user", users.contains(user));

	users = usersManager.findUsers(sess, userLastName);
	// This search must contain at least two results
	assertTrue("results must contain at least two users", users.size() >= 2);
	assertTrue("results must contain user and user2", users.contains(user) && users.contains(user2));

	users = usersManager.findUsers(sess, userLastName + " " + userFirstName);
	// This search must contain at least one result
	assertTrue("results must contain at least one user", users.size() >= 1);
	assertTrue("results must contain user", users.contains(user));

	// Search with a space in first name
	users = usersManager.findUsers(sess, user2.getFirstName());
	// This search must contain at least one result
	assertTrue("results must contain at least one user", users.size() >= 1);
	assertTrue("results must contain user2", users.contains(user2));
}

@Test
public void findUsersByNameFullText() throws Exception {
	System.out.println(CLASS_NAME + "findUsersByNameFullText");

	// Create second user
	User user2 = new User();
	user2.setFirstName(userFirstName);
	user2.setMiddleName("");
	user2.setLastName(userLastName+"2");
	// Different last name from the default user in the test
	user2.setTitleBefore("");
	user2.setTitleAfter("");

	assertNotNull(perun.getUsersManagerBl().createUser(sess, user2));
	// create new user in database
	usersForDeletion.add(user2);
	// save user for deletion after testing

	List<User> users = usersManager.findUsersByName(sess, userFirstName + " " + userLastName);
	// This search must contain at least one result
	assertTrue("results must contain at least one user", users.size() >= 1);
	// And must contain the user
	assertTrue("results must contain user", users.contains(user));

	users = usersManager.findUsersByName(sess, userFirstName);
	// This search must contain at least two results
	assertTrue("results must contain at least two users", users.size() >= 2);
	assertTrue("results must contain user and user2", users.contains(user) && users.contains(user2));
}

@Test
public void findUsersByNameUsingExactFields() throws Exception {
	System.out.println(CLASS_NAME + "findUsersByNameUsingExactFields");

	// Create second user
	User user2 = new User();
	user2.setFirstName(userFirstName);
	user2.setMiddleName("");
	user2.setLastName(userLastName+"2");
	// Different last name from the default user in the test
	user2.setTitleBefore("");
	user2.setTitleAfter("");

	assertNotNull(perun.getUsersManagerBl().createUser(sess, user2));
	// create new user in database
	usersForDeletion.add(user2);
	// save user for deletion after testing

	List<User> users = usersManager.findUsersByName(sess, "", userFirstName, "", userLastName, "");
	// This search must contain at least one result
	assertTrue("results must contain at least one user", users.size() >= 1);
	// And must contain the user
	assertTrue("results must contain user", users.contains(user));

	users = usersManager.findUsersByName(sess, "", userFirstName, "", "", "");
	// This search must contain at least two results
	assertTrue("results must contain at least two users", users.size() >= 2);
	assertTrue("results must contain user and user2", users.contains(user) && users.contains(user2));
}

@Test
public void getUsersByAttribute() throws Exception {
	System.out.println(CLASS_NAME + "getUsersByAttribute");

	// Check if the attribute already exists
	Attribute attr;
	AttributeDefinition attrDef;
	try {
		// NOTE(review): lookup name uses underscores ("user_test_attribute") while the
		// definition created below uses hyphens ("user-test-attribute") — verify this
		// mismatch is intentional; the catch branch will run on every fresh database.
		attrDef =
			perun.getAttributesManagerBl().getAttributeDefinition(sess, "urn:perun:user:attribute-def:opt:user_test_attribute");
	} catch (AttributeNotExistsException e) {
		// Attribute doesn't exist, so create it
		attrDef = new AttributeDefinition();
		attrDef.setNamespace("urn:perun:user:attribute-def:opt");
		attrDef.setFriendlyName("user-test-attribute");
		attrDef.setType(String.class.getName());

		attrDef = perun.getAttributesManagerBl().createAttribute(sess, attrDef);
	}

	attr = new Attribute(attrDef);
	attr.setValue("UserAttribute");

	// Set the attribute to the user
	perun.getAttributesManagerBl().setAttribute(sess, user, attr);

	assertTrue("results must contain user", usersManager.getUsersByAttribute(sess, attr).contains(user));
}

@Test
public void findUsersByExactName() throws Exception {
	System.out.println(CLASS_NAME + "findUsersByExactName");

	String searchString = user.getFirstName()+user.getLastName();

	List<User> users = perun.getUsersManager().findUsersByExactName(sess, searchString);
	assertTrue("No users found for exact match!", !users.isEmpty());
	assertTrue("Test user not found in results!", users.contains(user));

	// we shouldn't find anybody using substring
	searchString = searchString.substring(0, searchString.length()-3);
	users = perun.getUsersManager().findUsersByExactName(sess, searchString);
	assertTrue("Test user found in results when shouldn't!", !users.contains(user));
	assertTrue("Some user found using substring when we shouldn't find anybody!", users.isEmpty());
}

@Test
public void findRichUsersWithAttributesByExactMatch() throws Exception {
	System.out.println(CLASS_NAME + "findRichUsersWithAttributesByExactMatch");

	// Create second user
	User user2 = new User();
	// Different first name from the default user in the test, contains a space
	user2.setFirstName(new StringBuilder(userFirstName).append('2').insert(userFirstName.length() / 2, ' ').toString());
	user2.setMiddleName("");
	user2.setLastName(userLastName);
	user2.setTitleBefore("");
	user2.setTitleAfter("");

	assertNotNull(perun.getUsersManagerBl().createUser(sess, user2));
	// create new user in database
	usersForDeletion.add(user2);
	// save user for deletion after testing

	ArrayList<String> attrNames = new ArrayList<>();
	attrNames.add("urn:perun:user:attribute-def:def:preferredMail");

	String searchString = user.getFirstName() + " " + user.getLastName();
	List<RichUser> users = perun.getUsersManager().findRichUsersWithAttributesByExactMatch(sess, searchString, attrNames);
	assertTrue("No users found for exact match!", !users.isEmpty());

	searchString = user2.getFirstName() + " " + user2.getLastName();
	users = perun.getUsersManager().findRichUsersWithAttributesByExactMatch(sess, searchString, attrNames);
	assertTrue("Results must contain user2!", users.contains(user2));
	assertTrue("Results can't contain user!", !users.contains(user));
}

@Test
public void getUsersCount() throws Exception {
	System.out.println(CLASS_NAME + "getUsersCount");

	setUpUser();
	int count = perun.getUsersManager().getUsersCount(sess);
	assertTrue(count>0);
}

// Bulk lookup of 1001 users — exercises id-list chunking in the manager.
// NOTE(review): the 1001 count presumably crosses an internal batch-size limit — confirm.
@Test
public void getUsersByIds() throws Exception {
	System.out.println(CLASS_NAME + "getUsersByIds");

	List<Integer> ids = new ArrayList<>();
	Set<User> users = new HashSet<>();

	for (int i = 1; i < 1002; i++) {
		User user2 = new User();
		user2.setFirstName(userFirstName+i);
		perun.getUsersManagerBl().createUser(sess, user2);
		ids.add(user2.getId());
		users.add(user2);
	}

	assertEquals(users, new HashSet<>(perun.getUsersManager().getUsersByIds(sess, ids)));
}

@Test
public void getRichUsersByIds() throws Exception {
	System.out.println(CLASS_NAME + "getRichUsersByIds");

	RichUser richUser = new RichUser(user, perun.getUsersManager().getUserExtSources(sess, user));
	List<RichUser> richUsers = perun.getUsersManager().getRichUsersByIds(sess, Collections.singletonList(user.getId()));
	assertThat(richUsers).containsExactlyInAnyOrder(richUser);
	assertThat(richUsers.get(0).getUserExtSources()).containsExactlyInAnyOrderElementsOf(richUser.getUserExtSources());

	User user2 =
		new User();
	user2.setFirstName(userFirstName + "2");
	user2 = perun.getUsersManagerBl().createUser(sess, user2);
	RichUser richUser2 = new RichUser(user2, perun.getUsersManager().getUserExtSources(sess, user2));

	richUsers = perun.getUsersManager().getRichUsersByIds(sess, Arrays.asList(user.getId(), user2.getId()));
	assertThat(richUsers).containsExactlyInAnyOrder(richUser, richUser2);
	assertThat(richUsers.get(richUsers.indexOf(richUser)).getUserExtSources()).containsExactlyInAnyOrderElementsOf(richUser.getUserExtSources());
	assertThat(richUsers.get(richUsers.indexOf(richUser2)).getUserExtSources()).containsExactlyInAnyOrderElementsOf(richUser2.getUserExtSources());
}

@Test
public void getRichUsersWithAttributesByIds() throws Exception {
	System.out.println(CLASS_NAME + "getRichUsersWithAttributesByIds");

	RichUser richUser = new RichUser(user, perun.getUsersManager().getUserExtSources(sess, user), perun.getAttributesManager().getAttributes(sess, user));
	// NOTE(review): this first call uses getRichUsersByIds, not getRichUsersWithAttributesByIds —
	// looks like a copy-paste from the previous test; verify which method was intended.
	List<RichUser> richUsers = perun.getUsersManager().getRichUsersByIds(sess, Collections.singletonList(user.getId()));
	assertThat(richUsers).containsExactlyInAnyOrder(richUser);
	assertThat(richUsers.get(0).getUserExtSources()).containsExactlyInAnyOrderElementsOf(richUser.getUserExtSources());

	User user2 = new User();
	user2.setFirstName(userFirstName + "2");
	user2 = perun.getUsersManagerBl().createUser(sess, user2);
	RichUser richUser2 = new RichUser(user2, perun.getUsersManager().getUserExtSources(sess, user2), perun.getAttributesManager().getAttributes(sess, user2));

	richUsers = perun.getUsersManager().getRichUsersWithAttributesByIds(sess, Arrays.asList(user.getId(), user2.getId()));
	assertThat(richUsers).containsExactlyInAnyOrder(richUser, richUser2);
	assertThat(richUsers.get(richUsers.indexOf(richUser)).getUserExtSources()).containsExactlyInAnyOrderElementsOf(richUser.getUserExtSources());
	assertThat(richUsers.get(richUsers.indexOf(richUser2)).getUserExtSources()).containsExactlyInAnyOrderElementsOf(richUser2.getUserExtSources());
	assertThat(richUsers.get(richUsers.indexOf(user)).getUserAttributes()).containsExactlyInAnyOrderElementsOf(richUser.getUserAttributes());
	assertThat(richUsers.get(richUsers.indexOf(user2)).getUserAttributes()).containsExactlyInAnyOrderElementsOf(richUser2.getUserAttributes());
}

@Test
public void getGroupsWhereUserIsActive() throws Exception {
	System.out.println(CLASS_NAME + "getGroupsWhereUserIsActive(resource/facility)");

	Member member = setUpMember(vo);
	User u = perun.getUsersManager().getUserByMember(sess, member);

	Facility f = new Facility(0, "name", "description");
	f = perun.getFacilitiesManager().createFacility(sess, f);
	Resource r = new Resource(0, "name", "description", f.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, f);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);

	// more groups case
	List<Group> groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, f, u);
	assertTrue("Should have only one group", groups.size() == 1);
	groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, r, u);
	assertTrue("Should have only one group", groups.size() == 1);

	// VO-expired member must not be "active" anywhere
	perun.getMembersManager().setStatus(sess, member, Status.EXPIRED);
	groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, f, u);
	assertTrue("Should have no groups, since member should be VO expired", groups.isEmpty());
	groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, r, u);
	assertTrue("Should have no groups, since member should be VO expired", groups.isEmpty());

	perun.getMembersManager().setStatus(sess, member, Status.VALID);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);
	groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, f,
u); assertTrue("Should have 2 groups", groups.size() == 2); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, r, u); assertTrue("Should have 2 groups", groups.size() == 2); perun.getGroupsManager().setMemberGroupStatus(sess, member, g1, MemberGroupStatus.EXPIRED); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, f, u); assertTrue("Should have 1 group since in one should be expired", groups.size() == 1); assertTrue("Should be a G1 group.", groups.contains(g2)); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, r, u); assertTrue("Should have 1 group since in one should be expired", groups.size() == 1); assertTrue("Should be a G1 group.", groups.contains(g2)); // more resources case Resource r2 = new Resource(0, "name2", "description2", f.getId()); r2 = perun.getResourcesManager().createResource(sess, r2, vo, f); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, f, u); assertTrue("Should have 1 group since in one should be expired", groups.size() == 1); assertTrue("Should be a G1 group.", groups.contains(g2)); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, r, u); assertTrue("Should have 1 group since in one should be expired", groups.size() == 1); assertTrue("Should be a G1 group.", groups.contains(g2)); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, r2, u); assertTrue("Should be empty since there are no groups on R2 resource.", groups.size() == 0); perun.getResourcesManager().removeGroupFromResource(sess, g2, r); perun.getResourcesManager().assignGroupToResource(sess, g2, r2, false, false, false); perun.getGroupsManager().setMemberGroupStatus(sess, member, g1, MemberGroupStatus.VALID); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, f, u); assertTrue("Should have 2 groups", groups.size() == 2); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, r, u); assertTrue("Should have 1 group on R1", groups.size() == 1); assertTrue("Should be 
a G1 group.", groups.contains(g1)); groups = perun.getUsersManager().getGroupsWhereUserIsActive(sess, r2, u); assertTrue("Should have 1 group on R2", groups.size() == 1); assertTrue("Should be a G2 group.", groups.contains(g2)); } @Test public void convertAttributesToJSON() { System.out.println(CLASS_NAME + "convertAttributesToJSON"); Candidate candidate = new Candidate(user, userExtSource); candidate.setAttributes(Collections.singletonMap(perun.getAttributesManager().NS_USER_ATTR + ":attribute", "value")); JSONObject jsonObject = candidate.convertAttributesToJSON(); assertEquals(8, jsonObject.length()); assertEquals("value", jsonObject.getJSONArray(perun.getAttributesManager().NS_USER_ATTR + ":attribute").getString(0)); assertEquals(userFirstName, jsonObject.getJSONArray(perun.getAttributesManager().NS_USER_ATTR_CORE + ":firstName").getString(0)); } @Test public void convertAttributesWithNullToJSON() { System.out.println(CLASS_NAME + "convertAttributesWithNullToJSON"); Candidate candidate = new Candidate(user, userExtSource); candidate.setAttributes(Collections.singletonMap(perun.getAttributesManager().NS_USER_ATTR + ":attribute", null)); JSONObject jsonObject = candidate.convertAttributesToJSON(); assertEquals(8, jsonObject.length()); assertTrue(jsonObject.getJSONArray(perun.getAttributesManager().NS_USER_ATTR + ":attribute").isNull(0)); } @Test public void getRichUserExtSourcesReturnsCorrectAttributes() throws Exception { System.out.println(CLASS_NAME + "getRichUserExtSourcesReturnsCorrectAttributes"); testGetRichUserExtSourceAttributes( () -> perun.getUsersManager().getRichUserExtSources(sess, user, Collections.singletonList(URN_ATTR_UES_CN)), (rues) -> { assertThat(rues).isNotNull(); assertThat(rues.getAttributes()) .anySatisfy(a -> assertThat(a.getFriendlyName()).isEqualTo(ATTR_UES_CN)); }, ATTR_UES_CN ); } @Test public void getRichUserExtSourcesDoesNotReturnNotSpecifiedAttribute() throws Exception { System.out.println(CLASS_NAME + 
"getRichUserExtSourcesDoesNotReturnNotSpecifiedAttribute"); testGetRichUserExtSourceAttributes( () -> perun.getUsersManager().getRichUserExtSources(sess, user, Collections.singletonList(URN_ATTR_UES_O)), (rues) -> { assertThat(rues).isNotNull(); assertThat(rues.getAttributes()) .noneSatisfy(a -> assertThat(a.getFriendlyName()).isEqualTo(ATTR_UES_CN)); }, ATTR_UES_O ); } @Test public void getRichUserExtSourcesReturnsAllAttributesForNull() throws Exception { System.out.println(CLASS_NAME + "getRichUserExtSourcesReturnsAllAttributesForNull"); testGetRichUserExtSourceAttributes( () -> perun.getUsersManagerBl().getRichUserExtSources(sess, user, null), (rues) -> { assertThat(rues).isNotNull(); assertThat(rues.getAttributes()).isNotEmpty(); }, ATTR_UES_O ); } @Test public void getRichUserExtSourcesReturnsNoAttributesForEmpty() throws Exception { System.out.println(CLASS_NAME + "getRichUserExtSourcesReturnsNoAttributesForEmpty"); testGetRichUserExtSourceAttributes( () -> perun.getUsersManagerBl().getRichUserExtSources(sess, user, Collections.emptyList()), (rues) -> { assertThat(rues).isNotNull(); assertThat(rues.getAttributes()).isEmpty(); }, ATTR_UES_O ); } @Test public void findUserById() { System.out.println(CLASS_NAME + "findUserById"); List<User> users = perun.getUsersManagerBl().findUsers(sess, String.valueOf(user.getId())); assertEquals(1, users.size()); assertEquals(user, users.get(0)); } @Test public void findUserByNames() { System.out.println(CLASS_NAME + "findUserByNames"); List<User> users = perun.getUsersManagerBl().findUsers(sess, user.getFirstName()); assertEquals(1, users.size()); assertEquals(user, users.get(0)); users = perun.getUsersManagerBl().findUsers(sess, user.getLastName()); assertEquals(1, users.size()); assertEquals(user, users.get(0)); } @Test public void findUserByUuid() { System.out.println(CLASS_NAME + "findUserByUuid"); List<User> users = perun.getUsersManagerBl().findUsers(sess, user.getUuid().toString()); 
assertThat(users).containsExactly(user); } @Test public void findUserByMemberAttribute() throws Exception { System.out.println(CLASS_NAME + "findUserByMemberAttribute"); Member member = setUpMember(vo); User user = perun.getUsersManagerBl().getUserByMember(sess, member); // add member attribute to CoreConfig List<String> attributes = BeansUtils.getCoreConfig().getAttributesToSearchUsersAndMembersBy(); attributes.add("urn:perun:member:attribute-def:def:test"); BeansUtils.getCoreConfig().setAttributesToSearchUsersAndMembersBy(attributes); AttributeDefinition attrDef = new AttributeDefinition(); attrDef.setNamespace("urn:perun:member:attribute-def:def"); attrDef.setFriendlyName("test"); attrDef.setType(String.class.getName()); attrDef = perun.getAttributesManagerBl().createAttribute(sess, attrDef); Attribute attribute = new Attribute(attrDef); attribute.setValue("login"); perun.getAttributesManagerBl().setAttribute(sess, member, attribute); List<User> users = perun.getUsersManagerBl().findUsers(sess, "login"); assertEquals(1, users.size()); assertEquals(user, users.get(0)); // reset CoreConfig to previous state attributes.remove("urn:perun:member:attribute-def:def:test"); BeansUtils.getCoreConfig().setAttributesToSearchUsersAndMembersBy(attributes); } @Test public void findUserByUserAttribute() throws Exception { System.out.println(CLASS_NAME + "findUserByUserAttribute"); // add user attribute to CoreConfig List<String> attributes = BeansUtils.getCoreConfig().getAttributesToSearchUsersAndMembersBy(); attributes.add("urn:perun:user:attribute-def:def:test"); BeansUtils.getCoreConfig().setAttributesToSearchUsersAndMembersBy(attributes); AttributeDefinition attrDef = new AttributeDefinition(); attrDef.setNamespace("urn:perun:user:attribute-def:def"); attrDef.setFriendlyName("test"); attrDef.setType(String.class.getName()); attrDef = perun.getAttributesManagerBl().createAttribute(sess, attrDef); Attribute attribute = new Attribute(attrDef); attribute.setValue("login"); 
perun.getAttributesManagerBl().setAttribute(sess, user, attribute);

	List<User> users = perun.getUsersManagerBl().findUsers(sess, "login");
	assertEquals(1, users.size());
	assertEquals(user, users.get(0));

	// reset CoreConfig to previous state
	attributes.remove("urn:perun:user:attribute-def:def:test");
	BeansUtils.getCoreConfig().setAttributesToSearchUsersAndMembersBy(attributes);
}

@Test
public void findUserByUserExtSourceAttribute() throws Exception {
	System.out.println(CLASS_NAME + "findUserByUserExtSourceAttribute");

	// add userExtSource attribute to CoreConfig so findUsers searches it
	List<String> attributes = BeansUtils.getCoreConfig().getAttributesToSearchUsersAndMembersBy();
	attributes.add("urn:perun:ues:attribute-def:def:test");
	BeansUtils.getCoreConfig().setAttributesToSearchUsersAndMembersBy(attributes);

	AttributeDefinition attrDef = new AttributeDefinition();
	attrDef.setNamespace("urn:perun:ues:attribute-def:def");
	attrDef.setFriendlyName("test");
	attrDef.setType(String.class.getName());
	attrDef = perun.getAttributesManagerBl().createAttribute(sess, attrDef);
	Attribute attribute = new Attribute(attrDef);
	attribute.setValue("login");
	perun.getAttributesManagerBl().setAttribute(sess, userExtSource, attribute);

	List<User> users = perun.getUsersManagerBl().findUsers(sess, "login");
	assertEquals(1, users.size());
	assertEquals(user, users.get(0));

	// reset CoreConfig to previous state
	attributes.remove("urn:perun:ues:attribute-def:def:test");
	BeansUtils.getCoreConfig().setAttributesToSearchUsersAndMembersBy(attributes);
}

@Test
public void findUserByUserExtSourceLogin() {
	System.out.println(CLASS_NAME + "findUserByUserExtSourceLogin");

	// the ext-source login set in setUpUserExtSource must be searchable
	List<User> users = perun.getUsersManagerBl().findUsers(sess, extLogin);
	assertEquals(1, users.size());
	assertEquals(user, users.get(0));
}

@Test
public void testCreateServiceUser() throws Exception {
	System.out.println(CLASS_NAME + "testCreateServiceUser");

	Candidate candidate = setUpCandidateForSpecificUser1();

	User createdUser =
usersManager.createServiceUser(sess, candidate, Collections.emptyList()); createdUser = usersManager.getUserById(sess, createdUser.getId()); assertThat(createdUser).isEqualToComparingOnlyGivenFields(candidate, "firstName", "lastName"); assertThat(createdUser.isServiceUser()); } @Test public void testCreateServiceUserSetsAttributes() throws Exception { System.out.println(CLASS_NAME + "testCreateServiceUserSetsAttributes"); Candidate candidate = setUpCandidateForSpecificUser1(); Map<String, String> attrs = new HashMap<>(); String value = "asdf@sdf.df"; attrs.put(URN_ATTR_USER_PREFERRED_MAIL, value); candidate.setAttributes(attrs); User createdUser = usersManager.createServiceUser(sess, candidate, Collections.emptyList()); Attribute attr = perun.getAttributesManagerBl().getAttribute(sess, createdUser, URN_ATTR_USER_PREFERRED_MAIL); assertThat(attr.getValue()).isEqualTo(value); } @Test public void testCreateServiceUserSetsUes() throws Exception { System.out.println(CLASS_NAME + "testCreateServiceUserSetsUes"); Candidate candidate = setUpCandidateForSpecificUser1(); User createdUser = usersManager.createServiceUser(sess, candidate, Collections.emptyList()); UserExtSource candidateUes = candidate.getUserExtSource(); User userByUes = usersManager.getUserByExtSourceNameAndExtLogin(sess, candidateUes.getExtSource().getName(), candidateUes.getLogin()); assertThat(createdUser).isEqualByComparingTo(userByUes); } @Test public void testCreateServiceUserFailsForAlreadyExistingUes() throws Exception { System.out.println(CLASS_NAME + "testCreateServiceUserFailsForAlreadyExistingUes"); Candidate candidate = setUpCandidateForSpecificUser1(); usersManager.createServiceUser(sess, candidate, Collections.emptyList()); assertThatExceptionOfType(UserExtSourceExistsException.class) .isThrownBy(() -> usersManager.createServiceUser(sess, candidate, Collections.emptyList())); } @Test public void getSponsors() throws Exception { System.out.println(CLASS_NAME + "getSponsors"); 
setUpNamespaceAttribute();

	String email = "email@sdf.sd";
	AttributeDefinition emailAD = perun.getAttributesManagerBl()
		.getAttributeDefinition(sess, URN_ATTR_USER_PREFERRED_MAIL);
	Attribute emailAttribute = new Attribute(emailAD);
	emailAttribute.setValue(email);
	perun.getAttributesManagerBl().setAttribute(sess, user, emailAttribute);

	Member member = perun.getMembersManagerBl().getMemberByUser(sess, vo, sponsoredUser);
	LocalDate validity = LocalDate.now();
	perun.getMembersManagerBl().updateSponsorshipValidity(sess, member, user, validity);

	Member sponsoredMember = perun.getMembersManagerBl().getMemberByUser(sess, vo, sponsoredUser);

	// request the sponsor list enriched with the preferred-mail attribute only
	List<Sponsor> sponsors = usersManager
		.getSponsorsForMember(sess, sponsoredMember, Collections.singletonList(URN_ATTR_USER_PREFERRED_MAIL));

	assertThat(sponsors)
		.hasSize(1);
	assertThat(sponsors.get(0).getUser())
		.isEqualTo(user);
	assertThat(sponsors.get(0).getValidityTo())
		.isEqualTo(validity);
	assertThat(sponsors.get(0).getUserAttributes())
		.hasSize(1);
	assertThat(sponsors.get(0).getUserAttributes().get(0).getName())
		.isEqualTo(URN_ATTR_USER_PREFERRED_MAIL);
	assertThat(sponsors.get(0).getUserAttributes().get(0).valueAsString())
		.isEqualTo(email);
}

@Test
public void getUsersPage_all() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_all");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	UsersPageQuery query = new UsersPageQuery(10, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	// other tests may have created users too, so only check ours are included
	assertTrue(users.getData().size() > 1);
	assertTrue(users.getData().containsAll(usersManager.getRichUsersByIds(sess, List.of(user.getId(), user2.getId()))));
}

@Test
public void getUsersPage_searchString() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_searchString");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	UsersPageQuery query = new UsersPageQuery(3, 0,
SortingOrder.ASCENDING, UsersOrderColumn.ID, "jane");

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	// the "jane" search string must match only the second user
	assertEquals(1, users.getData().size());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user2)));
}

@Test
public void getUsersPage_orderByName() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_orderByName");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	UsersPageQuery query = new UsersPageQuery(10, 0, SortingOrder.ASCENDING, UsersOrderColumn.NAME, "smith");

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(2, users.getData().size());
	// ascending name order: "jane" sorts before "john"
	assertEquals(users.getData().get(0), usersManager.getRichUser(sess, user2));
	assertEquals(users.getData().get(1), usersManager.getRichUser(sess, user));
}

@Test
public void getUsersPage_withoutVo() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_withoutVo");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	// trailing 'true' — presumably the withoutVo flag of this query overload
	UsersPageQuery query = new UsersPageQuery(10, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, true);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertTrue(users.getData().size() > 1);
	assertTrue(users.getData().containsAll(usersManager.getRichUsersByIds(sess, List.of(user.getId(), user2.getId()))));
}

@Test
public void getUsersPage_withAttributes() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_withAttributes");

	User user = setUpUser("john", "smith");

	AttributeDefinition prefMailAttrDef =
		perun.getAttributesManagerBl().getAttributeDefinition(sess, URN_ATTR_USER_PREFERRED_MAIL);
	Attribute prefMail = new Attribute(prefMailAttrDef);
	prefMail.setValue("mail@mail.com");
	perun.getAttributesManagerBl().setAttribute(sess, user, prefMail);

	UsersPageQuery query = new UsersPageQuery(1, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID,
"smith");

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of(URN_ATTR_USER_PREFERRED_MAIL));

	assertNotNull(users);
	assertEquals(1, users.getData().size());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user)));
	// only the requested attribute is attached to the returned rich user
	assertThat(users.getData().get(0).getUserAttributes()).containsOnly(prefMail);
}

@Test
public void getUsersPage_userHasMembersInMultipleVos() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_userHasMembersInMultipleVos");

	User user = setUpUser("jane", "smith");

	// the same user becomes a member of three different VOs; paging must not
	// duplicate the user in the result
	Vo newVo = new Vo(1, "UserManagerTestV1o", "UMTestVo1");
	Vo returnedVo = perun.getVosManager().createVo(sess, newVo);
	Member member = perun.getMembersManagerBl().createMember(sess, returnedVo, user);

	newVo = new Vo(2, "UserManagerTestV2o", "UMTestVo2");
	returnedVo = perun.getVosManager().createVo(sess, newVo);
	member = perun.getMembersManagerBl().createMember(sess, returnedVo, user);

	newVo = new Vo(3, "UserManagerTestV3o", "UMTestVo3");
	returnedVo = perun.getVosManager().createVo(sess, newVo);
	member = perun.getMembersManagerBl().createMember(sess, returnedVo, user);

	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "jane");

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(1, users.getData().size());
	assertEquals(1, users.getTotalCount());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user)));
}

@Test
public void getUsersPage_facilitySearchString() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilitySearchString");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Member
member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);

	// filter by facility AND search string "jane"
	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "jane", facility.getId());

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(1, users.getData().size());
	assertEquals(1, users.getTotalCount());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user2)));
}

@Test
public void getUsersPage_facility() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facility");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);

	// filter by facility only (empty search string) — both users are assigned
	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "", facility.getId());

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(2, users.getData().size());
	assertEquals(2, users.getTotalCount());
assertTrue(users.getData().containsAll(usersManager.getRichUsersByIds(sess, List.of(user.getId(), user2.getId()))));
}

@Test
public void getUsersPage_facilityOnlyAllowed() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityOnlyAllowed");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	// only member2 is VALID, so with onlyAllowed=true member must be filtered out
	perun.getMembersManagerBl().setStatus(sess, member, Status.INVALID);
	perun.getMembersManagerBl().setStatus(sess, member2, Status.VALID);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);

	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "", facility.getId(), true);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(1, users.getData().size());
	assertEquals(1, users.getTotalCount());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user2)));
}

@Test
public void getUsersPage_facilityVo() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityVo");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r
= perun.getResourcesManager().createResource(sess, r, vo, facility);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);

	// filter by facility and vo (service/resource filters left null)
	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "",
		facility.getId(), vo.getId(), null, null);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(2, users.getData().size());
	assertEquals(2, users.getTotalCount());
	assertTrue(users.getData().containsAll(usersManager.getRichUsersByIds(sess, List.of(user.getId(), user2.getId()))));
}

@Test
public void getUsersPage_facilityVoOnlyAllowed() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityVoOnlyAllowed");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	// only member2 is VALID — onlyAllowed filtering should exclude member
	perun.getMembersManagerBl().setStatus(sess, member, Status.INVALID);
	perun.getMembersManagerBl().setStatus(sess, member2, Status.VALID);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);
UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "",
		facility.getId(), vo.getId(), null, null, true);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(1, users.getData().size());
	assertEquals(1, users.getTotalCount());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user2)));
}

@Test
public void getUsersPage_facilityResource() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityResource");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);

	// filter by facility and resource (vo/service filters left null)
	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "",
		facility.getId(), null, null, r.getId());

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(2, users.getData().size());
	assertEquals(2, users.getTotalCount());
	assertTrue(users.getData().containsAll(usersManager.getRichUsersByIds(sess, List.of(user.getId(), user2.getId()))));
}

@Test
public void getUsersPage_facilityResourceOnlyAllowed() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityResourceOnlyAllowed");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane",
"smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	// only member2 is VALID — onlyAllowed filtering should exclude member
	perun.getMembersManagerBl().setStatus(sess, member, Status.INVALID);
	perun.getMembersManagerBl().setStatus(sess, member2, Status.VALID);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);

	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "",
		facility.getId(), null, null, r.getId(), true);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(1, users.getData().size());
	assertEquals(1, users.getTotalCount());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user2)));
}

@Test
public void getUsersPage_facilityVoService() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityVoService");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Service service = new Service(0, "dummy_service");
	service = perun.getServicesManagerBl().createService(sess, service);
	perun.getResourcesManagerBl().assignService(sess, r, service);

	Member member =
perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);

	// filter by facility, vo and service simultaneously
	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "",
		facility.getId(), vo.getId(), service.getId(), null);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(2, users.getData().size());
	assertEquals(2, users.getTotalCount());
	assertTrue(users.getData().containsAll(usersManager.getRichUsersByIds(sess, List.of(user.getId(), user2.getId()))));
}

@Test
public void getUsersPage_facilityVoServiceOnlyAllowed() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityVoServiceOnlyAllowed");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Service service = new Service(0, "dummy_service");
	service = perun.getServicesManagerBl().createService(sess, service);
	perun.getResourcesManagerBl().assignService(sess, r, service);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, vo, user2);

	// only member2 is VALID — onlyAllowed filtering should exclude member
	perun.getMembersManagerBl().setStatus(sess, member, Status.INVALID);
	perun.getMembersManagerBl().setStatus(sess, member2, Status.VALID);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(vo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess,
g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r, false, false, false);

	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "",
		facility.getId(), vo.getId(), service.getId(), null, true);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(1, users.getData().size());
	assertEquals(1, users.getTotalCount());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user2)));
}

@Test
public void getUsersPage_facilityService() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityService");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	// second vo + resource on the same facility, sharing the same service
	Vo newVo = new Vo(2, "UserManagerTestV2o", "UMTestVo2");
	Vo returnedVo = perun.getVosManager().createVo(sess, newVo);
	Resource r2 = new Resource(1, "name1", "description1", facility.getId());
	r2 = perun.getResourcesManager().createResource(sess, r2, returnedVo, facility);

	Service service = new Service(0, "dummy_service");
	service = perun.getServicesManagerBl().createService(sess, service);
	perun.getResourcesManagerBl().assignService(sess, r, service);
	perun.getResourcesManagerBl().assignService(sess, r2, service);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, returnedVo, user2);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(returnedVo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
	perun.getResourcesManager().assignGroupToResource(sess, g2, r2, false, false, false);
UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "",
		facility.getId(), null, service.getId(), null);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	// the service filter spans both vos on the facility, so both users match
	assertEquals(2, users.getData().size());
	assertEquals(2, users.getTotalCount());
	assertTrue(users.getData().containsAll(usersManager.getRichUsersByIds(sess, List.of(user.getId(), user2.getId()))));
}

@Test
public void getUsersPage_facilityServiceOnlyAllowed() throws Exception {
	System.out.println(CLASS_NAME + "getUsersPage_facilityServiceOnlyAllowed");

	User user = setUpUser("john", "smith");
	User user2 = setUpUser("jane", "smith");

	Facility facility = new Facility();
	facility.setName("UsersManagerTestFacility");
	facility = perun.getFacilitiesManager().createFacility(sess, facility);
	Resource r = new Resource(0, "name", "description", facility.getId());
	r = perun.getResourcesManager().createResource(sess, r, vo, facility);

	Vo newVo = new Vo(2, "UserManagerTestV2o", "UMTestVo2");
	Vo returnedVo = perun.getVosManager().createVo(sess, newVo);
	Resource r2 = new Resource(1, "name1", "description1", facility.getId());
	r2 = perun.getResourcesManager().createResource(sess, r2, returnedVo, facility);

	Service service = new Service(0, "dummy_service");
	service = perun.getServicesManagerBl().createService(sess, service);
	perun.getResourcesManagerBl().assignService(sess, r, service);
	perun.getResourcesManagerBl().assignService(sess, r2, service);

	Member member = perun.getMembersManagerBl().createMember(sess, vo, user);
	Member member2 = perun.getMembersManagerBl().createMember(sess, returnedVo, user2);

	// only member2 is VALID — onlyAllowed filtering should exclude member
	perun.getMembersManagerBl().setStatus(sess, member, Status.INVALID);
	perun.getMembersManagerBl().setStatus(sess, member2, Status.VALID);

	Group g1 = setUpGroup(vo, member, "group1");
	Group g2 = setUpGroup(returnedVo, member2, "group2");

	perun.getResourcesManager().assignGroupToResource(sess, g1, r, false, false, false);
perun.getResourcesManager().assignGroupToResource(sess, g2, r2, false, false, false);

	UsersPageQuery query = new UsersPageQuery(3, 0, SortingOrder.ASCENDING, UsersOrderColumn.ID, "",
		facility.getId(), null, service.getId(), null, true);

	Paginated<RichUser> users = usersManager.getUsersPage(sess, query, List.of());

	assertNotNull(users);
	assertEquals(1, users.getData().size());
	assertEquals(1, users.getTotalCount());
	assertTrue(users.getData().contains(usersManager.getRichUser(sess, user2)));
}

// PRIVATE METHODS -------------------------------------------------------------

/**
 * This method is used to test attributes of returned richUserExtSource from given call.
 *
 * First, this method creates attributes for given names. Then the method executes
 * the given getRichUserExtSourceCall and finds the tested rues. Then calls the ruesValidation.
 *
 * @param getRichUserExtSourceCall call that returns richUserExtSources
 * @param ruesValidation validation of returned richUserExtSource
 * @param attrNamesToSetup names of ues attributes that will be set up for the tested ues
 * @throws Exception any exception
 */
private void testGetRichUserExtSourceAttributes(
	TestSupplier<List<RichUserExtSource>> getRichUserExtSourceCall,
	TestConsumer<RichUserExtSource> ruesValidation,
	String...
attrNamesToSetup
) throws Exception {

	// set up ues attributes
	for (String attrName : attrNamesToSetup) {
		Attribute attribute = createUserExtSourceAttribute(attrName);
		perun.getAttributesManagerBl().setAttribute(sess, userExtSource, attribute);
	}

	// get richUserExtSources and find the one with set attribute
	RichUserExtSource desiredRues = null;
	List<RichUserExtSource> richUserExtSources = getRichUserExtSourceCall.getThrows();
	for (RichUserExtSource richUserExtSource : richUserExtSources) {
		if (richUserExtSource.asUserExtSource().equals(userExtSource)) {
			desiredRues = richUserExtSource;
		}
	}

	// validate assertions (desiredRues may be null if not found — validators assert non-null)
	ruesValidation.acceptThrows(desiredRues);
}

// creates the shared test user (stored in the 'user' field)
private void setUpUser() throws Exception {

	user = new User();
	user.setFirstName(userFirstName);
	user.setMiddleName("");
	user.setLastName(userLastName);
	user.setTitleBefore("");
	user.setTitleAfter("");
	assertNotNull(perun.getUsersManagerBl().createUser(sess, user));
	// create new user in database
	usersForDeletion.add(user);
	// save user for deletion after testing
}

// creates and returns a fresh user with the given names
private User setUpUser(String firstName, String lastName) throws Exception {

	User user = new User();
	user.setFirstName(firstName);
	user.setMiddleName("");
	user.setLastName(lastName);
	user.setTitleBefore("");
	user.setTitleAfter("");
	assertNotNull(perun.getUsersManagerBl().createUser(sess, user));
	// create new user in database
	usersForDeletion.add(user);
	// save user for deletion after testing
	return user;
}

// creates and returns a user without touching the shared 'user' field
private User setUpEmptyUser() throws Exception {

	User usr = new User();
	usr.setFirstName(userFirstName);
	usr.setMiddleName("");
	usr.setLastName(userLastName);
	usr.setTitleBefore("");
	usr.setTitleAfter("");
	assertNotNull(perun.getUsersManagerBl().createUser(sess, usr));
	// create new user in database
	usersForDeletion.add(usr);
	// save user for deletion after testing
	return usr;
}

// creates serviceUser1 owned by the shared 'user'
private void setUpSpecificUser1ForUser(Vo vo) throws Exception {

	Candidate candidate = setUpCandidateForSpecificUser1();

	List<User> owners = new ArrayList<>();
owners.add(user);

	Member serviceMember = perun.getMembersManagerBl().createServiceMember(sess, vo, candidate, owners);
	perun.getMembersManagerBl().validateMember(sess, serviceMember);
	// set first candidate as member of test VO
	assertNotNull("No member created", serviceMember);
	serviceUser1 = usersManager.getUserByMember(sess, serviceMember);
	usersForDeletion.add(serviceUser1);
}

// creates serviceUser2 owned by the shared 'user'
private void setUpSpecificUser2ForUser(Vo vo) throws Exception {

	Candidate candidate = setUpCandidateForSpecificUser2();

	List<User> owners = new ArrayList<>();
	owners.add(user);

	Member serviceMember = perun.getMembersManagerBl().createServiceMember(sess, vo, candidate, owners);
	perun.getMembersManagerBl().validateMember(sess, serviceMember);
	// set first candidate as member of test VO
	assertNotNull("No member created", serviceMember);
	serviceUser2 = usersManager.getUserByMember(sess, serviceMember);
	usersForDeletion.add(serviceUser2);
}

// creates sponsoredUser sponsored by the shared 'user' (who is granted SPONSOR role)
private void setUpSponsoredUserForVo(Vo vo) throws Exception {

	Candidate candidate = setUpCandidateForSponsoredUser();

	AuthzResolverBlImpl.setRole(sess, user, vo, Role.SPONSOR);

	Member sponsoredMember = perun.getMembersManagerBl().createMember(sess, vo, candidate);
	perun.getMembersManagerBl().setSponsorshipForMember(sess, sponsoredMember, user);
	perun.getMembersManagerBl().validateMember(sess, sponsoredMember);
	// set first candidate as member of test VO
	assertNotNull("No member created", sponsoredMember);
	sponsoredUser = usersManager.getUserByMember(sess, sponsoredMember);
	usersForDeletion.add(sponsoredUser);
}

// attaches the shared userExtSource (with extLogin) to the shared 'user'
private void setUpUserExtSource() throws Exception {

	ExtSource externalSource = perun.getExtSourcesManager().getExtSourceByName(sess, extSourceName);
	// gets real external source object from database
	userExtSource.setExtSource(externalSource);
	// put real external source into user's external source
	userExtSource.setLogin(extLogin);
	// set users login in his ext source
	assertNotNull(usersManager.addUserExtSource(sess, user, userExtSource));
	// create
// new user ext source in database (continuation of the comment split onto this line)
}

// creates the test VO and registers the internal ext source with it
private Vo setUpVo() throws Exception {

	Vo newVo = new Vo(0, "UserManagerTestVo", "UMTestVo");
	Vo returnedVo = perun.getVosManager().createVo(sess, newVo);
	// create test VO in database
	assertNotNull("unable to create testing Vo",returnedVo);
	newVo.setId(returnedVo.getId());
	assertEquals("both VOs should be the same",newVo,returnedVo);
	ExtSource newExtSource = new ExtSource(extSourceName, ExtSourcesManager.EXTSOURCE_INTERNAL);
	ExtSource es = perun.getExtSourcesManager().createExtSource(sess, newExtSource, null);
	// get and create real external source from DB
	perun.getExtSourcesManager().addExtSource(sess, returnedVo, es);
	// add real ext source to our VO
	return returnedVo;
}

// creates a validated member of the given vo from the shared candidate
private Member setUpMember(Vo vo) throws Exception {

	// List<Candidate> candidates = perun.getVosManager().findCandidates(sess, vo, extLogin);
	// find candidates from ext source based on extLogin
	// assertTrue(candidates.size() > 0);

	Candidate candidate = setUpCandidate();
	Member member = perun.getMembersManagerBl().createMember(sess, vo, candidate);
	// candidates.get(0)
	perun.getMembersManagerBl().validateMember(sess, member);
	// set first candidate as member of test VO
	assertNotNull("No member created", member);
	usersForDeletion.add(usersManager.getUserByMember(sess, member));
	// save user for deletion after test
	return member;
}

private Group setUpGroup(Vo vo, Member member) throws Exception {
	return setUpGroup(vo, member, "UserManagerTestGroup");
}

// creates a group in the vo, adds the member, and makes the member's user its admin
private Group setUpGroup(Vo vo, Member member, String groupName) throws Exception {

	Group group = new Group(groupName,"");
	group = perun.getGroupsManager().createGroup(sess, vo, group);
	perun.getGroupsManager().addMember(sess, group, member);
	User user = perun.getUsersManagerBl().getUserByMember(sess, member);
	perun.getGroupsManager().addAdmin(sess, group, user);
	return group;
}

// plain (non-service) candidate using the shared ext source and login
private Candidate setUpCandidate(){

	Candidate candidate = new Candidate(); //Mockito.mock(Candidate.class);
candidate.setFirstName(userFirstName); candidate.setId(0); candidate.setMiddleName(""); candidate.setLastName(userLastName); candidate.setTitleBefore(""); candidate.setTitleAfter(""); final UserExtSource userExtSource = new UserExtSource(extSource, extLogin); candidate.setUserExtSource(userExtSource); candidate.setAttributes(new HashMap<>()); return candidate; } private Candidate setUpCandidateForSpecificUser1() { Candidate candidate = new Candidate(); //Mockito.mock(Candidate.class); candidate.setFirstName("(Service)"); candidate.setId(0); candidate.setMiddleName(""); candidate.setLastName("testingServiceUser01"); candidate.setTitleBefore(""); candidate.setTitleAfter(""); candidate.setServiceUser(true); final UserExtSource userExtSource = new UserExtSource(extSource, Long.toHexString(Double.doubleToLongBits(Math.random()))); candidate.setUserExtSource(userExtSource); candidate.setAttributes(new HashMap<>()); return candidate; } private Candidate setUpCandidateForSpecificUser2() { Candidate candidate = new Candidate(); //Mockito.mock(Candidate.class); candidate.setFirstName("(Service)"); candidate.setId(0); candidate.setMiddleName(""); candidate.setLastName("testingServiceUser02"); candidate.setTitleBefore(""); candidate.setTitleAfter(""); candidate.setServiceUser(true); final UserExtSource userExtSource = new UserExtSource(extSource, Long.toHexString(Double.doubleToLongBits(Math.random()))); candidate.setUserExtSource(userExtSource); candidate.setAttributes(new HashMap<>()); return candidate; } private Candidate setUpCandidateForSponsoredUser() { Candidate candidate = new Candidate(); candidate.setFirstName("Sponsored"); candidate.setId(0); candidate.setMiddleName(""); candidate.setLastName("User01"); candidate.setTitleBefore(""); candidate.setTitleAfter(""); candidate.setServiceUser(false); candidate.setSponsoredUser(true); final UserExtSource userExtSource = new UserExtSource(extSource, Long.toHexString(Double.doubleToLongBits(Math.random()))); 
candidate.setUserExtSource(userExtSource); candidate.setAttributes(new HashMap<>()); return candidate; } private Attribute createUserExtSourceAttribute(String name) throws Exception { return this.createUserExtSourceAttribute(name, String.class.getName(), "Testing value", false); } private Attribute createUserExtSourceAttribute(String name, String type, Object value, boolean unique) throws Exception { AttributeDefinition attrDef = new AttributeDefinition(); attrDef.setNamespace(AttributesManager.NS_UES_ATTR_DEF); attrDef.setDescription(name); attrDef.setFriendlyName(name); attrDef.setType(type); attrDef.setUnique(unique); attrDef = perun.getAttributesManagerBl().createAttribute(sess, attrDef); Attribute attribute = new Attribute(attrDef); attribute.setValue(value); return attribute; } private void setUpNamespaceAttribute() throws Exception { Attribute attrLogin = new Attribute(); attrLogin.setNamespace(AttributesManager.NS_USER_ATTR_DEF); attrLogin.setFriendlyName("login-namespace:dummy"); attrLogin.setType(String.class.getName()); perun.getAttributesManager().createAttribute(sess, attrLogin); } private Facility setUpFacility() throws Exception { Facility facility = new Facility(); facility.setName("UsersManagerTestFacility"); return perun.getFacilitiesManager().createFacility(sess, facility); } private Resource setUpResource(Facility facility, Vo vo) throws Exception { Resource resource = new Resource(); resource.setName("UsersManagerTestResource"); resource.setDescription("Testovaci"); return perun.getResourcesManager().createResource(sess, resource, vo, facility); } }
zlamalp/perun
perun-core/src/test/java/cz/metacentrum/perun/core/entry/UsersManagerEntryIntegrationTest.java
Java
bsd-2-clause
105,451
/* * Copyright (C) 2006, 2008 Apple Inc. All rights reserved. * Copyright (C) 2008 Collabora Ltd. All rights reserved. * Copyright (C) 2009 Holger Hans Peter Freyther * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "config.h" #include "PluginPackage.h" #include "MIMETypeRegistry.h" #include "PluginDatabase.h" #include "PluginDebug.h" #include "Timer.h" #include "npruntime_impl.h" #include <string.h> #include <wtf/OwnArrayPtr.h> #include <wtf/text/CString.h> namespace WebCore { PluginPackage::~PluginPackage() { // This destructor gets called during refresh() if PluginDatabase's // PluginSet hash is already populated, as it removes items from // the hash table. 
Calling the destructor on a loaded plug-in of // course would cause a crash, so we check to call unload before we // ASSERT. // FIXME: There is probably a better way to fix this. if (!m_loadCount) unloadWithoutShutdown(); else unload(); ASSERT(!m_isLoaded); } void PluginPackage::freeLibrarySoon() { ASSERT(!m_freeLibraryTimer.isActive()); ASSERT(m_module); ASSERT(!m_loadCount); m_freeLibraryTimer.startOneShot(0); } void PluginPackage::freeLibraryTimerFired(Timer<PluginPackage>*) { ASSERT(m_module); ASSERT(!m_loadCount); unloadModule(m_module); m_module = 0; } int PluginPackage::compare(const PluginPackage& compareTo) const { // Sort plug-ins that allow multiple instances first. bool AallowsMultipleInstances = !quirks().contains(PluginQuirkDontAllowMultipleInstances); bool BallowsMultipleInstances = !compareTo.quirks().contains(PluginQuirkDontAllowMultipleInstances); if (AallowsMultipleInstances != BallowsMultipleInstances) return AallowsMultipleInstances ? -1 : 1; // Sort plug-ins in a preferred path first. bool AisInPreferredDirectory = PluginDatabase::isPreferredPluginDirectory(parentDirectory()); bool BisInPreferredDirectory = PluginDatabase::isPreferredPluginDirectory(compareTo.parentDirectory()); if (AisInPreferredDirectory != BisInPreferredDirectory) return AisInPreferredDirectory ? 
-1 : 1; int diff = strcmp(name().utf8().data(), compareTo.name().utf8().data()); if (diff) return diff; diff = compareFileVersion(compareTo.version()); if (diff) return diff; return strcmp(parentDirectory().utf8().data(), compareTo.parentDirectory().utf8().data()); } PluginPackage::PluginPackage(const String& path, const time_t& lastModified) : m_isEnabled(true) , m_isLoaded(false) , m_loadCount(0) , m_path(path) , m_moduleVersion(0) , m_module(0) , m_lastModified(lastModified) , m_freeLibraryTimer(this, &PluginPackage::freeLibraryTimerFired) #if ENABLE(NETSCAPE_PLUGIN_METADATA_CACHE) , m_infoIsFromCache(true) #endif { m_fileName = pathGetFileName(m_path); m_parentDirectory = m_path.left(m_path.length() - m_fileName.length() - 1); } void PluginPackage::unload() { if (!m_isLoaded) return; if (--m_loadCount > 0) return; m_NPP_Shutdown(); unloadWithoutShutdown(); } void PluginPackage::unloadWithoutShutdown() { if (!m_isLoaded) return; ASSERT(!m_loadCount); ASSERT(m_module); // <rdar://5530519>: Crash when closing tab with pdf file (Reader 7 only) // If the plugin has subclassed its parent window, as with Reader 7, we may have // gotten here by way of the plugin's internal window proc forwarding a message to our // original window proc. 
If we free the plugin library from here, we will jump back // to code we just freed when we return, so delay calling FreeLibrary at least until // the next message loop freeLibrarySoon(); m_isLoaded = false; } void PluginPackage::setEnabled(bool enabled) { m_isEnabled = enabled; } PassRefPtr<PluginPackage> PluginPackage::createPackage(const String& path, const time_t& lastModified) { RefPtr<PluginPackage> package = adoptRef(new PluginPackage(path, lastModified)); if (!package->fetchInfo()) return 0; return package.release(); } #if ENABLE(NETSCAPE_PLUGIN_METADATA_CACHE) PassRefPtr<PluginPackage> PluginPackage::createPackageFromCache(const String& path, const time_t& lastModified, const String& name, const String& description, const String& mimeDescription) { RefPtr<PluginPackage> package = adoptRef(new PluginPackage(path, lastModified)); package->m_name = name; package->m_description = description; package->determineModuleVersionFromDescription(); package->setMIMEDescription(mimeDescription); package->m_infoIsFromCache = true; return package.release(); } #endif #if defined(XP_UNIX) void PluginPackage::determineQuirks(const String& mimeType) { if (MIMETypeRegistry::isJavaAppletMIMEType(mimeType)) { // Because a single process cannot create multiple VMs, and we cannot reliably unload a // Java VM, we cannot unload the Java plugin, or we'll lose reference to our only VM m_quirks.add(PluginQuirkDontUnloadPlugin); // Setting the window region to an empty region causes bad scrolling repaint problems // with the Java plug-in. 
m_quirks.add(PluginQuirkDontClipToZeroRectWhenScrolling); return; } if (mimeType == "application/x-shockwave-flash") { static const PlatformModuleVersion flashTenVersion(0x0a000000); if (compareFileVersion(flashTenVersion) >= 0) { // Flash 10.0 b218 doesn't like having a NULL window handle m_quirks.add(PluginQuirkDontSetNullWindowHandleOnDestroy); #if PLATFORM(QT) m_quirks.add(PluginQuirkRequiresGtkToolKit); #endif } else { // Flash 9 and older requests windowless plugins if we return a mozilla user agent m_quirks.add(PluginQuirkWantsMozillaUserAgent); } #if PLATFORM(QT) // Flash will crash on repeated calls to SetWindow in windowed mode m_quirks.add(PluginQuirkDontCallSetWindowMoreThanOnce); #endif #if CPU(X86_64) // 64-bit Flash freezes if right-click is sent in windowless mode m_quirks.add(PluginQuirkIgnoreRightClickInWindowlessMode); #endif m_quirks.add(PluginQuirkRequiresDefaultScreenDepth); m_quirks.add(PluginQuirkThrottleInvalidate); m_quirks.add(PluginQuirkThrottleWMUserPlusOneMessages); m_quirks.add(PluginQuirkFlashURLNotifyBug); } } #endif #if !OS(WINDOWS) void PluginPackage::determineModuleVersionFromDescription() { // It's a bit lame to detect the plugin version by parsing it // from the plugin description string, but it doesn't seem that // version information is available in any standardized way at // the module level, like in Windows if (m_description.isEmpty()) return; if (m_description.startsWith("Shockwave Flash") && m_description.length() >= 19) { // The flash version as a PlatformModuleVersion differs on Unix from Windows // since the revision can be larger than a 8 bits, so we allow it 16 here and // push the major/minor up 8 bits. Thus on Unix, Flash's version may be // 0x0a000000 instead of 0x000a0000. 
Vector<String> versionParts; m_description.substring(16).split(' ', /*allowEmptyEntries =*/ false, versionParts); if (versionParts.isEmpty()) return; if (versionParts.size() >= 1) { Vector<String> majorMinorParts; versionParts[0].split('.', majorMinorParts); if (majorMinorParts.size() >= 1) { bool converted = false; unsigned major = majorMinorParts[0].toUInt(&converted); if (converted) m_moduleVersion = (major & 0xff) << 24; } if (majorMinorParts.size() == 2) { bool converted = false; unsigned minor = majorMinorParts[1].toUInt(&converted); if (converted) m_moduleVersion |= (minor & 0xff) << 16; } } if (versionParts.size() >= 2) { String revision = versionParts[1]; if (revision.length() > 1 && (revision[0] == 'r' || revision[0] == 'b')) { revision.remove(0, 1); m_moduleVersion |= revision.toInt() & 0xffff; } } } } #endif #if ENABLE(NETSCAPE_PLUGIN_API) void PluginPackage::initializeBrowserFuncs() { memset(&m_browserFuncs, 0, sizeof(m_browserFuncs)); m_browserFuncs.size = sizeof(m_browserFuncs); m_browserFuncs.version = NPVersion(); m_browserFuncs.geturl = NPN_GetURL; m_browserFuncs.posturl = NPN_PostURL; m_browserFuncs.requestread = NPN_RequestRead; m_browserFuncs.newstream = NPN_NewStream; m_browserFuncs.write = NPN_Write; m_browserFuncs.destroystream = NPN_DestroyStream; m_browserFuncs.status = NPN_Status; m_browserFuncs.uagent = NPN_UserAgent; m_browserFuncs.memalloc = NPN_MemAlloc; m_browserFuncs.memfree = NPN_MemFree; m_browserFuncs.memflush = NPN_MemFlush; m_browserFuncs.reloadplugins = NPN_ReloadPlugins; m_browserFuncs.geturlnotify = NPN_GetURLNotify; m_browserFuncs.posturlnotify = NPN_PostURLNotify; m_browserFuncs.getvalue = NPN_GetValue; m_browserFuncs.setvalue = NPN_SetValue; m_browserFuncs.invalidaterect = NPN_InvalidateRect; m_browserFuncs.invalidateregion = NPN_InvalidateRegion; m_browserFuncs.forceredraw = NPN_ForceRedraw; m_browserFuncs.getJavaEnv = NPN_GetJavaEnv; m_browserFuncs.getJavaPeer = NPN_GetJavaPeer; m_browserFuncs.pushpopupsenabledstate = 
NPN_PushPopupsEnabledState; m_browserFuncs.poppopupsenabledstate = NPN_PopPopupsEnabledState; m_browserFuncs.pluginthreadasynccall = NPN_PluginThreadAsyncCall; m_browserFuncs.releasevariantvalue = _NPN_ReleaseVariantValue; m_browserFuncs.getstringidentifier = _NPN_GetStringIdentifier; m_browserFuncs.getstringidentifiers = _NPN_GetStringIdentifiers; m_browserFuncs.getintidentifier = _NPN_GetIntIdentifier; m_browserFuncs.identifierisstring = _NPN_IdentifierIsString; m_browserFuncs.utf8fromidentifier = _NPN_UTF8FromIdentifier; m_browserFuncs.intfromidentifier = _NPN_IntFromIdentifier; m_browserFuncs.createobject = _NPN_CreateObject; m_browserFuncs.retainobject = _NPN_RetainObject; m_browserFuncs.releaseobject = _NPN_ReleaseObject; m_browserFuncs.invoke = _NPN_Invoke; m_browserFuncs.invokeDefault = _NPN_InvokeDefault; m_browserFuncs.evaluate = _NPN_Evaluate; m_browserFuncs.getproperty = _NPN_GetProperty; m_browserFuncs.setproperty = _NPN_SetProperty; m_browserFuncs.removeproperty = _NPN_RemoveProperty; m_browserFuncs.hasproperty = _NPN_HasProperty; m_browserFuncs.hasmethod = _NPN_HasMethod; m_browserFuncs.setexception = _NPN_SetException; m_browserFuncs.enumerate = _NPN_Enumerate; m_browserFuncs.construct = _NPN_Construct; m_browserFuncs.getvalueforurl = NPN_GetValueForURL; m_browserFuncs.setvalueforurl = NPN_SetValueForURL; m_browserFuncs.getauthenticationinfo = NPN_GetAuthenticationInfo; } #endif #if ENABLE(PLUGIN_PACKAGE_SIMPLE_HASH) unsigned PluginPackage::hash() const { struct HashCodes { unsigned hash; time_t modifiedDate; } hashCodes; hashCodes.hash = m_path.impl()->hash(); hashCodes.modifiedDate = m_lastModified; return StringHasher::hashMemory<sizeof(hashCodes)>(&hashCodes); } bool PluginPackage::equal(const PluginPackage& a, const PluginPackage& b) { return a.m_description == b.m_description; } #endif int PluginPackage::compareFileVersion(const PlatformModuleVersion& compareVersion) const { // return -1, 0, or 1 if plug-in version is less than, equal to, or 
greater than // the passed version #if OS(WINDOWS) if (m_moduleVersion.mostSig != compareVersion.mostSig) return m_moduleVersion.mostSig > compareVersion.mostSig ? 1 : -1; if (m_moduleVersion.leastSig != compareVersion.leastSig) return m_moduleVersion.leastSig > compareVersion.leastSig ? 1 : -1; #else if (m_moduleVersion != compareVersion) return m_moduleVersion > compareVersion ? 1 : -1; #endif return 0; } #if ENABLE(NETSCAPE_PLUGIN_METADATA_CACHE) bool PluginPackage::ensurePluginLoaded() { if (!m_infoIsFromCache) return m_isLoaded; m_quirks = PluginQuirkSet(); m_name = String(); m_description = String(); m_fullMIMEDescription = String(); m_moduleVersion = 0; return fetchInfo(); } #endif }
yoavweiss/RespImg-WebCore
plugins/PluginPackage.cpp
C++
bsd-2-clause
13,860
cask :v1 => 'fingerlock' do version :latest sha256 :no_check url 'http://www.fingerkeyapp.com/download/FingerLock.dmg' homepage 'http://www.fingerkeyapp.com/' license :unknown # todo: improve this machine-generated value app 'FingerLock.app' end
andyshinn/homebrew-cask
Casks/fingerlock.rb
Ruby
bsd-2-clause
263
source ../testsupport.sh bpipe run -p batch_name=testsim -p sample_info=samples.txt test.groovy > test.out 2>&1 exists testsim_simulated_cnvs/XXXXX_test.bam # Run again should skip bpipe test -p batch_name=testsim -p sample_info=samples.txt test.groovy > test.out 2>&1 grep -q "Would execute" test.out && err "Found 'Would execute' indicating command would execute even when outputs were already created" true
vivovip/bpipe
tests/produce_wildcard_with_set_outputdir/run.sh
Shell
bsd-3-clause
424
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.react.views.scroll; import android.view.View; import android.view.ViewGroup; import com.facebook.react.bridge.JSApplicationIllegalArgumentException; import com.facebook.react.bridge.ReactContext; import com.facebook.react.uimanager.UIManagerHelper; /** Helper class that deals with emitting Scroll Events. */ public class ReactScrollViewHelper { public static final long MOMENTUM_DELAY = 20; public static final String OVER_SCROLL_ALWAYS = "always"; public static final String AUTO = "auto"; public static final String OVER_SCROLL_NEVER = "never"; /** Shared by {@link ReactScrollView} and {@link ReactHorizontalScrollView}. */ public static void emitScrollEvent(ViewGroup scrollView, float xVelocity, float yVelocity) { emitScrollEvent(scrollView, ScrollEventType.SCROLL, xVelocity, yVelocity); } public static void emitScrollBeginDragEvent(ViewGroup scrollView) { emitScrollEvent(scrollView, ScrollEventType.BEGIN_DRAG); } public static void emitScrollEndDragEvent( ViewGroup scrollView, float xVelocity, float yVelocity) { emitScrollEvent(scrollView, ScrollEventType.END_DRAG, xVelocity, yVelocity); } public static void emitScrollMomentumBeginEvent( ViewGroup scrollView, int xVelocity, int yVelocity) { emitScrollEvent(scrollView, ScrollEventType.MOMENTUM_BEGIN, xVelocity, yVelocity); } public static void emitScrollMomentumEndEvent(ViewGroup scrollView) { emitScrollEvent(scrollView, ScrollEventType.MOMENTUM_END); } private static void emitScrollEvent(ViewGroup scrollView, ScrollEventType scrollEventType) { emitScrollEvent(scrollView, scrollEventType, 0, 0); } private static void emitScrollEvent( ViewGroup scrollView, ScrollEventType scrollEventType, float xVelocity, float yVelocity) { View contentView = scrollView.getChildAt(0); if (contentView == null) { return; } ReactContext 
reactContext = (ReactContext) scrollView.getContext(); UIManagerHelper.getEventDispatcherForReactTag(reactContext, scrollView.getId()) .dispatchEvent( ScrollEvent.obtain( scrollView.getId(), scrollEventType, scrollView.getScrollX(), scrollView.getScrollY(), xVelocity, yVelocity, contentView.getWidth(), contentView.getHeight(), scrollView.getWidth(), scrollView.getHeight())); } public static int parseOverScrollMode(String jsOverScrollMode) { if (jsOverScrollMode == null || jsOverScrollMode.equals(AUTO)) { return View.OVER_SCROLL_IF_CONTENT_SCROLLS; } else if (jsOverScrollMode.equals(OVER_SCROLL_ALWAYS)) { return View.OVER_SCROLL_ALWAYS; } else if (jsOverScrollMode.equals(OVER_SCROLL_NEVER)) { return View.OVER_SCROLL_NEVER; } else { throw new JSApplicationIllegalArgumentException("wrong overScrollMode: " + jsOverScrollMode); } } }
exponent/exponent
android/ReactAndroid/src/main/java/com/facebook/react/views/scroll/ReactScrollViewHelper.java
Java
bsd-3-clause
3,192
/** * Copyright 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @providesModule ReactNativeFiber * @flow */ 'use strict'; import type { Element } from 'React'; import type { Fiber } from 'ReactFiber'; import type { ReactNodeList } from 'ReactTypes'; import type { ReactNativeBaseComponentViewConfig } from 'ReactNativeViewConfigRegistry'; const NativeMethodsMixin = require('NativeMethodsMixin'); const ReactFiberReconciler = require('ReactFiberReconciler'); const ReactGenericBatching = require('ReactGenericBatching'); const ReactNativeAttributePayload = require('ReactNativeAttributePayload'); const ReactNativeComponentTree = require('ReactNativeComponentTree'); const ReactNativeInjection = require('ReactNativeInjection'); const ReactNativeTagHandles = require('ReactNativeTagHandles'); const ReactNativeViewConfigRegistry = require('ReactNativeViewConfigRegistry'); const ReactPortal = require('ReactPortal'); const UIManager = require('UIManager'); const deepFreezeAndThrowOnMutationInDev = require('deepFreezeAndThrowOnMutationInDev'); const emptyObject = require('emptyObject'); const findNodeHandle = require('findNodeHandle'); const invariant = require('invariant'); const { injectInternals } = require('ReactFiberDevToolsHook'); const { precacheFiberNode, uncacheFiberNode, updateFiberProps, } = ReactNativeComponentTree; ReactNativeInjection.inject(); type Container = number; type Instance = { _children: Array<Instance | number>, _nativeTag: number, viewConfig: ReactNativeBaseComponentViewConfig, }; type Props = Object; type TextInstance = number; function NativeHostComponent(tag, viewConfig) { this._nativeTag = tag; this._children = []; this.viewConfig = viewConfig; } Object.assign(NativeHostComponent.prototype, NativeMethodsMixin); function 
recursivelyUncacheFiberNode(node : Instance | TextInstance) { if (typeof node === 'number') { // Leaf node (eg text) uncacheFiberNode(node); } else { uncacheFiberNode((node : any)._nativeTag); (node : any)._children.forEach(recursivelyUncacheFiberNode); } } const NativeRenderer = ReactFiberReconciler({ appendChild( parentInstance : Instance | Container, child : Instance | TextInstance ) : void { if (typeof parentInstance === 'number') { // Root container UIManager.setChildren( parentInstance, // containerTag [(child : any)._nativeTag] // reactTags ); } else { const children = parentInstance._children; children.push(child); UIManager.manageChildren( parentInstance._nativeTag, // containerTag [], // moveFromIndices [], // moveToIndices [(child : any)._nativeTag], // addChildReactTags [children.length - 1], // addAtIndices [], // removeAtIndices ); } }, appendInitialChild(parentInstance : Instance, child : Instance | TextInstance) : void { parentInstance._children.push(child); }, commitTextUpdate( textInstance : TextInstance, oldText : string, newText : string ) : void { UIManager.updateView( textInstance, // reactTag 'RCTRawText', // viewName {text: newText}, // props ); }, commitMount( instance : Instance, type : string, newProps : Props, internalInstanceHandle : Object ) : void { // Noop }, commitUpdate( instance : Instance, updatePayloadTODO : Object, type : string, oldProps : Props, newProps : Props, internalInstanceHandle : Object ) : void { const viewConfig = instance.viewConfig; updateFiberProps(instance._nativeTag, newProps); const updatePayload = ReactNativeAttributePayload.diff( oldProps, newProps, viewConfig.validAttributes ); UIManager.updateView( (instance : any)._nativeTag, // reactTag viewConfig.uiViewClassName, // viewName updatePayload, // props ); }, createInstance( type : string, props : Props, rootContainerInstance : Container, hostContext : {||}, internalInstanceHandle : Object ) : Instance { const tag = ReactNativeTagHandles.allocateTag(); const 
viewConfig = ReactNativeViewConfigRegistry.get(type); if (__DEV__) { for (let key in viewConfig.validAttributes) { if (props.hasOwnProperty(key)) { deepFreezeAndThrowOnMutationInDev(props[key]); } } } const updatePayload = ReactNativeAttributePayload.create( props, viewConfig.validAttributes ); UIManager.createView( tag, // reactTag viewConfig.uiViewClassName, // viewName rootContainerInstance, // rootTag updatePayload, // props ); const component = new NativeHostComponent(tag, viewConfig); precacheFiberNode(internalInstanceHandle, tag); updateFiberProps(tag, props); return component; }, createTextInstance( text : string, rootContainerInstance : Container, hostContext : {||}, internalInstanceHandle : Object, ) : TextInstance { const tag = ReactNativeTagHandles.allocateTag(); UIManager.createView( tag, // reactTag 'RCTRawText', // viewName rootContainerInstance, // rootTag {text: text} // props ); precacheFiberNode(internalInstanceHandle, tag); return tag; }, finalizeInitialChildren( parentInstance : Instance, type : string, props : Props, rootContainerInstance : Container, ) : boolean { // Don't send a no-op message over the bridge. if (parentInstance._children.length === 0) { return false; } // Map from child objects to native tags. // Either way we need to pass a copy of the Array to prevent it from being frozen. const nativeTags = parentInstance._children.map( (child) => typeof child === 'number' ? child // Leaf node (eg text) : child._nativeTag ); UIManager.setChildren( parentInstance._nativeTag, // containerTag nativeTags // reactTags ); return false; }, getRootHostContext() : {||} { return emptyObject; }, getChildHostContext() : {||} { return emptyObject; }, getPublicInstance(instance) { return instance; }, insertBefore( parentInstance : Instance | Container, child : Instance | TextInstance, beforeChild : Instance | TextInstance ) : void { // TODO (bvaughn): Remove this check when... 
// We create a wrapper object for the container in ReactNative render() // Or we refactor to remove wrapper objects entirely. // For more info on pros/cons see PR #8560 description. invariant( typeof parentInstance !== 'number', 'Container does not support insertBefore operation', ); const children = (parentInstance : any)._children; const beforeChildIndex = children.indexOf(beforeChild); const index = children.indexOf(child); // Move existing child or add new child? if (index >= 0) { children.splice(index, 1); children.splice(beforeChildIndex, 0, child); UIManager.manageChildren( (parentInstance : any)._nativeTag, // containerID [index], // moveFromIndices [beforeChildIndex], // moveToIndices [], // addChildReactTags [], // addAtIndices [], // removeAtIndices ); } else { children.splice(beforeChildIndex, 0, child); UIManager.manageChildren( (parentInstance : any)._nativeTag, // containerID [], // moveFromIndices [], // moveToIndices [(child : any)._nativeTag], // addChildReactTags [beforeChildIndex], // addAtIndices [], // removeAtIndices ); } }, prepareForCommit() : void { // Noop }, prepareUpdate( instance : Instance, type : string, oldProps : Props, newProps : Props, rootContainerInstance : Container, hostContext : {||} ) : null | Object { return emptyObject; }, removeChild( parentInstance : Instance | Container, child : Instance | TextInstance ) : void { recursivelyUncacheFiberNode(child); if (typeof parentInstance === 'number') { UIManager.manageChildren( parentInstance, // containerID [], // moveFromIndices [], // moveToIndices [], // addChildReactTags [], // addAtIndices [0], // removeAtIndices ); } else { const children = parentInstance._children; const index = children.indexOf(child); children.splice(index, 1); UIManager.manageChildren( parentInstance._nativeTag, // containerID [], // moveFromIndices [], // moveToIndices [], // addChildReactTags [], // addAtIndices [index], // removeAtIndices ); } }, resetAfterCommit() : void { // Noop }, 
resetTextContent(instance : Instance) : void { // Noop }, scheduleAnimationCallback: global.requestAnimationFrame, scheduleDeferredCallback: global.requestIdleCallback, shouldSetTextContent(props : Props) : boolean { // TODO (bvaughn) Revisit this decision. // Always returning false simplifies the createInstance() implementation, // But creates an additional child Fiber for raw text children. // No additional native views are created though. // It's not clear to me which is better so I'm deferring for now. // More context @ github.com/facebook/react/pull/8560#discussion_r92111303 return false; }, useSyncScheduling: true, }); ReactGenericBatching.injection.injectFiberBatchedUpdates( NativeRenderer.batchedUpdates ); const roots = new Map(); findNodeHandle.injection.injectFindNode( (fiber: Fiber) => { const instance: any = NativeRenderer.findHostInstance(fiber); return instance ? instance._nativeTag : null; } ); findNodeHandle.injection.injectFindRootNodeID( (instance) => instance._nativeTag ); const ReactNative = { findNodeHandle, render(element : Element<any>, containerTag : any, callback: ?Function) { let root = roots.get(containerTag); if (!root) { // TODO (bvaughn): If we decide to keep the wrapper component, // We could create a wrapper for containerTag as well to reduce special casing. root = NativeRenderer.createContainer(containerTag); roots.set(containerTag, root); } NativeRenderer.updateContainer(element, root, null, callback); return NativeRenderer.getPublicRootInstance(root); }, unmountComponentAtNode(containerTag : number) { const root = roots.get(containerTag); if (root) { // TODO: Is it safe to reset this now or should I wait since this unmount could be deferred? 
NativeRenderer.updateContainer(null, root, null, () => { roots.delete(containerTag); }); } }, unmountComponentAtNodeAndRemoveContainer(containerTag: number) { ReactNative.unmountComponentAtNode(containerTag); // Call back into native to remove all of the subviews from this container UIManager.removeRootView(containerTag); }, unstable_createPortal(children: ReactNodeList, containerTag : number, key : ?string = null) { return ReactPortal.createPortal(children, containerTag, null, key); }, unstable_batchedUpdates: ReactGenericBatching.batchedUpdates, }; if (typeof injectInternals === 'function') { injectInternals({ findFiberByHostInstance: ReactNativeComponentTree.getClosestInstanceFromNode, findHostInstanceByFiber: NativeRenderer.findHostInstance, }); } module.exports = ReactNative;
rlugojr/react
src/renderers/native/ReactNativeFiber.js
JavaScript
bsd-3-clause
11,790
// Copyright (c) 2013 Marshall A. Greenblatt. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the name Chromium Embedded // Framework nor the names of its contributors may be used to endorse // or promote products derived from this software without specific prior // written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // --------------------------------------------------------------------------- // // This file was generated by the CEF translator tool and should not edited // by hand. See the translator.README.txt file in the tools directory for // more information. 
// #ifndef CEF_INCLUDE_CAPI_CEF_FIND_HANDLER_CAPI_H_ #define CEF_INCLUDE_CAPI_CEF_FIND_HANDLER_CAPI_H_ #pragma once #ifdef __cplusplus extern "C" { #endif #include "include/capi/cef_base_capi.h" /// // Implement this structure to handle events related to find results. The // functions of this structure will be called on the UI thread. /// typedef struct _cef_find_handler_t { /// // Base structure. /// cef_base_t base; /// // Called to report find results returned by cef_browser_t::find(). // |identifer| is the identifier passed to cef_browser_t::find(), |count| is // the number of matches currently identified, |selectionRect| is the location // of where the match was found (in window coordinates), |activeMatchOrdinal| // is the current position in the search results, and |finalUpdate| is true // (1) if this is the last find notification. /// void (CEF_CALLBACK *on_find_result)(struct _cef_find_handler_t* self, struct _cef_browser_t* browser, int identifier, int count, const cef_rect_t* selectionRect, int activeMatchOrdinal, int finalUpdate); } cef_find_handler_t; #ifdef __cplusplus } #endif #endif // CEF_INCLUDE_CAPI_CEF_FIND_HANDLER_CAPI_H_
svn2github/cef
include/capi/cef_find_handler_capi.h
C
bsd-3-clause
3,079
/*
 * Copyright 2018 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkCanvas.h"
#include "SkCommandLineFlags.h"
#include "SkGraphics.h"
#include "SkMakeUnique.h"
#include "SkOSFile.h"
#include "SkOSPath.h"
#include "Skottie.h"
#include "SkottieUtils.h"
#include "SkPictureRecorder.h"
#include "SkStream.h"
#include "SkSurface.h"

#include <vector>

DEFINE_string2(input    , i, nullptr, "Input .json file.");
DEFINE_string2(writePath, w, nullptr, "Output directory.  Frames are names [0-9]{6}.png.");
DEFINE_string2(format   , f, "png"  , "Output format (png or skp)");

DEFINE_double(t0,   0, "Timeline start [0..1].");
DEFINE_double(t1,   1, "Timeline stop [0..1].");

DEFINE_double(fps, 30, "Decode frames per second.");

DEFINE_int32(width , 800, "Render width.");
DEFINE_int32(height, 600, "Render height.");

namespace {

// Abstract per-frame output sink.  handleFrame() builds the zero-padded frame
// file name and opens the output stream in FLAGS_writePath; subclasses
// implement saveFrame() to encode one animation frame to that stream.
class Sink {
public:
    virtual ~Sink() = default;
    Sink(const Sink&) = delete;
    Sink& operator=(const Sink&) = delete;

    // Encodes frame |idx| of |anim| to "<writePath>/0NNNNNN.<ext>".
    // Returns false if the output file cannot be opened or encoding fails.
    bool handleFrame(const sk_sp<skottie::Animation>& anim, size_t idx) const {
        // |idx| is a size_t; narrow it explicitly so it matches the %d
        // conversion (passing size_t straight to %d is undefined on LP64).
        const auto frame_file = SkStringPrintf("0%06d.%s",
                                               static_cast<int>(idx), fExtension.c_str());
        SkFILEWStream stream(SkOSPath::Join(FLAGS_writePath[0], frame_file.c_str()).c_str());

        if (!stream.isValid()) {
            SkDebugf("Could not open '%s/%s' for writing.\n",
                     FLAGS_writePath[0], frame_file.c_str());
            return false;
        }

        return this->saveFrame(anim, &stream);
    }

protected:
    explicit Sink(const char* ext) : fExtension(ext) {}

    virtual bool saveFrame(const sk_sp<skottie::Animation>& anim, SkFILEWStream*) const = 0;

private:
    const SkString fExtension;  // output file extension ("png" or "skp")
};

// Rasterizes each frame into an offscreen N32 premul surface and writes it
// out as a PNG.
class PNGSink final : public Sink {
public:
    PNGSink()
        : INHERITED("png")
        , fSurface(SkSurface::MakeRasterN32Premul(FLAGS_width, FLAGS_height)) {
        if (!fSurface) {
            SkDebugf("Could not allocate a %d x %d surface.\n", FLAGS_width, FLAGS_height);
        }
    }

    bool saveFrame(const sk_sp<skottie::Animation>& anim, SkFILEWStream* stream) const override {
        if (!fSurface) return false;

        auto* canvas = fSurface->getCanvas();
        SkAutoCanvasRestore acr(canvas, true);

        // Letterbox the animation's intrinsic size into the fixed output size.
        canvas->concat(SkMatrix::MakeRectToRect(SkRect::MakeSize(anim->size()),
                                                SkRect::MakeIWH(FLAGS_width, FLAGS_height),
                                                SkMatrix::kCenter_ScaleToFit));

        canvas->clear(SK_ColorTRANSPARENT);
        anim->render(canvas);

        auto png_data = fSurface->makeImageSnapshot()->encodeToData();
        if (!png_data) {
            SkDebugf("Failed to encode frame!\n");
            return false;
        }

        return stream->write(png_data->data(), png_data->size());
    }

private:
    const sk_sp<SkSurface> fSurface;  // reused across frames

    using INHERITED = Sink;
};

// Records each frame into an SkPicture and serializes it as a .skp.
class SKPSink final : public Sink {
public:
    SKPSink() : INHERITED("skp") {}

    bool saveFrame(const sk_sp<skottie::Animation>& anim, SkFILEWStream* stream) const override {
        SkPictureRecorder recorder;

        auto canvas = recorder.beginRecording(FLAGS_width, FLAGS_height);
        canvas->concat(SkMatrix::MakeRectToRect(SkRect::MakeSize(anim->size()),
                                                SkRect::MakeIWH(FLAGS_width, FLAGS_height),
                                                SkMatrix::kCenter_ScaleToFit));
        anim->render(canvas);
        recorder.finishRecordingAsPicture()->serialize(stream);

        return true;
    }

private:
    // Note: the dead, never-initialized fSurface member the original carried
    // over from PNGSink has been removed; SKPSink records, it never rasters.

    using INHERITED = Sink;
};

// Collects parser errors/warnings emitted while loading the animation and
// prints a summary plus per-entry details on report().
class Logger final : public skottie::Logger {
public:
    struct LogEntry {
        SkString fMessage,
                 fJSON;
    };

    void log(skottie::Logger::Level lvl, const char message[], const char json[]) override {
        auto& log = lvl == skottie::Logger::Level::kError ? fErrors : fWarnings;
        log.push_back({ SkString(message), json ? SkString(json) : SkString() });
    }

    void report() const {
        // size_t is not necessarily unsigned long (e.g. 64-bit Windows);
        // cast explicitly so %lu is portable.
        SkDebugf("Animation loaded with %lu error%s, %lu warning%s.\n",
                 static_cast<unsigned long>(fErrors.size()),
                 fErrors.size() == 1 ? "" : "s",
                 static_cast<unsigned long>(fWarnings.size()),
                 fWarnings.size() == 1 ? "" : "s");

        const auto& show = [](const LogEntry& log, const char prefix[]) {
            SkDebugf("%s%s", prefix, log.fMessage.c_str());
            if (!log.fJSON.isEmpty())
                SkDebugf(" : %s", log.fJSON.c_str());
            SkDebugf("\n");
        };

        for (const auto& err : fErrors)   show(err, "  !! ");
        for (const auto& wrn : fWarnings) show(wrn, "  ?? ");
    }

private:
    std::vector<LogEntry> fErrors,
                          fWarnings;
};

} // namespace

int main(int argc, char** argv) {
    SkCommandLineFlags::Parse(argc, argv);
    SkAutoGraphics ag;

    if (FLAGS_input.isEmpty() || FLAGS_writePath.isEmpty()) {
        SkDebugf("Missing required 'input' and 'writePath' args.\n");
        return 1;
    }

    if (FLAGS_fps <= 0) {
        SkDebugf("Invalid fps: %f.\n", FLAGS_fps);
        return 1;
    }

    if (!sk_mkdir(FLAGS_writePath[0])) {
        return 1;
    }

    std::unique_ptr<Sink> sink;
    if (0 == strcmp(FLAGS_format[0], "png")) {
        sink = skstd::make_unique<PNGSink>();
    } else if (0 == strcmp(FLAGS_format[0], "skp")) {
        sink = skstd::make_unique<SKPSink>();
    } else {
        SkDebugf("Unknown format: %s\n", FLAGS_format[0]);
        return 1;
    }

    auto logger = sk_make_sp<Logger>();
    auto anim = skottie::Animation::Builder()
            .setLogger(logger)
            .setResourceProvider(
                skottie_utils::FileResourceProvider::Make(SkOSPath::Dirname(FLAGS_input[0])))
            .makeFromFile(FLAGS_input[0]);
    if (!anim) {
        SkDebugf("Could not load animation: '%s'.\n", FLAGS_input[0]);
        return 1;
    }
    logger->report();

    // Cap the total number of decoded frames so an extreme duration/fps
    // combination cannot loop (nearly) forever.
    static constexpr double kMaxFrames = 10000;
    const auto t0 = SkTPin(FLAGS_t0, 0.0, 1.0),
               t1 = SkTPin(FLAGS_t1,  t0, 1.0),
          advance = 1 / std::min(anim->duration() * FLAGS_fps, kMaxFrames);

    size_t frame_index = 0;
    for (auto t = t0; t <= t1; t += advance) {
        anim->seek(t);
        sink->handleFrame(anim, frame_index++);
    }

    return 0;
}
Hikari-no-Tenshi/android_external_skia
modules/skottie/src/SkottieTool.cpp
C++
bsd-3-clause
6,383
/*
 * Copyright (c) 2015 - present Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.facebook.infer.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Annotation for a boolean function returning true when the argument is null.
 *
 * <p>Applicable to methods only ({@link ElementType#METHOD}) and retained in
 * class files but not at runtime ({@link RetentionPolicy#CLASS}), so it can be
 * read by bytecode-level tooling without any runtime reflection cost.
 */
@Retention(RetentionPolicy.CLASS)
@Target({ElementType.METHOD})
public @interface TrueOnNull {}
algobardo/infer
infer/annotations/com/facebook/infer/annotation/TrueOnNull.java
Java
bsd-3-clause
688
// ============================================================================= // PROJECT CHRONO - http://projectchrono.org // // Copyright (c) 2014 projectchrono.org // All rights reserved. // // Use of this source code is governed by a BSD-style license that can be found // in the LICENSE file at the top level of the distribution and at // http://projectchrono.org/license-chrono.txt. // // ============================================================================= // Authors: Radu Serban, Justin Madsen // ============================================================================= // // UAZBUS wheel subsystem // // ============================================================================= #include <algorithm> #include "chrono_vehicle/ChVehicleModelData.h" #include "chrono_models/vehicle/uaz/UAZBUS_Wheel.h" #include "chrono_thirdparty/filesystem/path.h" namespace chrono { namespace vehicle { namespace uaz { // ----------------------------------------------------------------------------- // Static variables // ----------------------------------------------------------------------------- const double UAZBUS_Wheel::m_mass = 12.0; const ChVector<> UAZBUS_Wheel::m_inertia(0.240642, 0.410903, 0.240642); const double UAZBUS_Wheel::m_radius = 0.2032; const double UAZBUS_Wheel::m_width = 0.1524; // ----------------------------------------------------------------------------- // ----------------------------------------------------------------------------- UAZBUS_Wheel::UAZBUS_Wheel(const std::string& name) : ChWheel(name) { m_vis_mesh_file = "uaz/uaz_rim.obj"; } } // end namespace uaz } // end namespace vehicle } // end namespace chrono
projectchrono/chrono
src/chrono_models/vehicle/uaz/UAZBUS_Wheel.cpp
C++
bsd-3-clause
1,679
import copy import datetime import decimal import math import warnings from itertools import tee from django.db import connection from django.db.models.query_utils import QueryWrapper from django.conf import settings from django import forms from django.core import exceptions, validators from django.utils.datastructures import DictWrapper from django.utils.dateparse import parse_date, parse_datetime, parse_time from django.utils.functional import curry from django.utils.text import capfirst from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import smart_unicode, force_unicode, smart_str from django.utils.ipv6 import clean_ipv6_address class NOT_PROVIDED: pass # The values to use for "blank" in SelectFields. Will be appended to the start # of most "choices" lists. BLANK_CHOICE_DASH = [("", "---------")] BLANK_CHOICE_NONE = [("", "None")] class FieldDoesNotExist(Exception): pass # A guide to Field parameters: # # * name: The name of the field specifed in the model. # * attname: The attribute to use on the model object. This is the same as # "name", except in the case of ForeignKeys, where "_id" is # appended. # * db_column: The db_column specified in the model (or None). # * column: The database column for this field. This is the same as # "attname", except if db_column is specified. # # Code that introspects values, or does other dynamic things, should use # attname. For example, this gets the primary key value of object "obj": # # getattr(obj, opts.pk.attname) class Field(object): """Base class for all field types""" # Designates whether empty strings fundamentally are allowed at the # database level. empty_strings_allowed = True # These track each time a Field instance is created. Used to retain order. # The auto_creation_counter is used for fields that Django implicitly # creates, creation_counter is used for all user-specified fields. 
creation_counter = 0 auto_creation_counter = -1 default_validators = [] # Default set of validators default_error_messages = { 'invalid_choice': _(u'Value %r is not a valid choice.'), 'null': _(u'This field cannot be null.'), 'blank': _(u'This field cannot be blank.'), 'unique': _(u'%(model_name)s with this %(field_label)s ' u'already exists.'), } # Generic field type description, usually overriden by subclasses def _description(self): return _(u'Field of type: %(field_type)s') % { 'field_type': self.__class__.__name__ } description = property(_description) def __init__(self, verbose_name=None, name=None, primary_key=False, max_length=None, unique=False, blank=False, null=False, db_index=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, unique_for_date=None, unique_for_month=None, unique_for_year=None, choices=None, help_text='', db_column=None, db_tablespace=None, auto_created=False, validators=[], error_messages=None): self.name = name self.verbose_name = verbose_name self.primary_key = primary_key self.max_length, self._unique = max_length, unique self.blank, self.null = blank, null # Oracle treats the empty string ('') as null, so coerce the null # option whenever '' is a possible value. if (self.empty_strings_allowed and connection.features.interprets_empty_strings_as_nulls): self.null = True self.rel = rel self.default = default self.editable = editable self.serialize = serialize self.unique_for_date, self.unique_for_month = (unique_for_date, unique_for_month) self.unique_for_year = unique_for_year self._choices = choices or [] self.help_text = help_text self.db_column = db_column self.db_tablespace = db_tablespace or settings.DEFAULT_INDEX_TABLESPACE self.auto_created = auto_created # Set db_index to True if the field has a relationship and doesn't # explicitly set db_index. self.db_index = db_index # Adjust the appropriate creation counter, and save our local copy. 
if auto_created: self.creation_counter = Field.auto_creation_counter Field.auto_creation_counter -= 1 else: self.creation_counter = Field.creation_counter Field.creation_counter += 1 self.validators = self.default_validators + validators messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self.error_messages = messages def __cmp__(self, other): # This is needed because bisect does not take a comparison function. return cmp(self.creation_counter, other.creation_counter) def __deepcopy__(self, memodict): # We don't have to deepcopy very much here, since most things are not # intended to be altered after initial creation. obj = copy.copy(self) if self.rel: obj.rel = copy.copy(self.rel) memodict[id(self)] = obj return obj def to_python(self, value): """ Converts the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Returns the converted value. Subclasses should override this. """ return value def run_validators(self, value): if value in validators.EMPTY_VALUES: return errors = [] for v in self.validators: try: v(value) except exceptions.ValidationError, e: if hasattr(e, 'code') and e.code in self.error_messages: message = self.error_messages[e.code] if e.params: message = message % e.params errors.append(message) else: errors.extend(e.messages) if errors: raise exceptions.ValidationError(errors) def validate(self, value, model_instance): """ Validates value and throws ValidationError. Subclasses should override this to provide validation logic. """ if not self.editable: # Skip validation for non-editable fields. return if self._choices and value: for option_key, option_value in self.choices: if isinstance(option_value, (list, tuple)): # This is an optgroup, so look inside the group for # options. 
for optgroup_key, optgroup_value in option_value: if value == optgroup_key: return elif value == option_key: return msg = self.error_messages['invalid_choice'] % value raise exceptions.ValidationError(msg) if value is None and not self.null: raise exceptions.ValidationError(self.error_messages['null']) if not self.blank and value in validators.EMPTY_VALUES: raise exceptions.ValidationError(self.error_messages['blank']) def clean(self, value, model_instance): """ Convert the value's type and run validation. Validation errors from to_python and validate are propagated. The correct value is returned if no error is raised. """ value = self.to_python(value) self.validate(value, model_instance) self.run_validators(value) return value def db_type(self, connection): """ Returns the database column data type for this field, for the provided connection. """ # The default implementation of this method looks at the # backend-specific DATA_TYPES dictionary, looking up the field by its # "internal type". # # A Field class can implement the get_internal_type() method to specify # which *preexisting* Django Field class it's most similar to -- i.e., # a custom field might be represented by a TEXT column type, which is # the same as the TextField Django field type, which means the custom # field's get_internal_type() returns 'TextField'. # # But the limitation of the get_internal_type() / data_types approach # is that it cannot handle database column types that aren't already # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. 
data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_") try: return (connection.creation.data_types[self.get_internal_type()] % data) except KeyError: return None @property def unique(self): return self._unique or self.primary_key def set_attributes_from_name(self, name): if not self.name: self.name = name self.attname, self.column = self.get_attname_column() if self.verbose_name is None and self.name: self.verbose_name = self.name.replace('_', ' ') def contribute_to_class(self, cls, name): self.set_attributes_from_name(name) self.model = cls cls._meta.add_field(self) if self.choices: setattr(cls, 'get_%s_display' % self.name, curry(cls._get_FIELD_display, field=self)) def get_attname(self): return self.name def get_attname_column(self): attname = self.get_attname() column = self.db_column or attname return attname, column def get_cache_name(self): return '_%s_cache' % self.name def get_internal_type(self): return self.__class__.__name__ def pre_save(self, model_instance, add): """ Returns field's value just before saving. """ return getattr(model_instance, self.attname) def get_prep_value(self, value): """ Perform preliminary non-db specific value checks and conversions. """ return value def get_db_prep_value(self, value, connection, prepared=False): """Returns field's value prepared for interacting with the database backend. Used by the default implementations of ``get_db_prep_save``and `get_db_prep_lookup``` """ if not prepared: value = self.get_prep_value(value) return value def get_db_prep_save(self, value, connection): """ Returns field's value prepared for saving into a database. 
""" return self.get_db_prep_value(value, connection=connection, prepared=False) def get_prep_lookup(self, lookup_type, value): """ Perform preliminary non-db specific lookup checks and conversions """ if hasattr(value, 'prepare'): return value.prepare() if hasattr(value, '_prepare'): return value._prepare() if lookup_type in ( 'regex', 'iregex', 'month', 'day', 'week_day', 'search', 'contains', 'icontains', 'iexact', 'startswith', 'istartswith', 'endswith', 'iendswith', 'isnull' ): return value elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'): return self.get_prep_value(value) elif lookup_type in ('range', 'in'): return [self.get_prep_value(v) for v in value] elif lookup_type == 'year': try: return int(value) except ValueError: raise ValueError("The __year lookup type requires an integer " "argument") raise TypeError("Field has invalid lookup: %s" % lookup_type) def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False): """ Returns field's value prepared for database lookup. 
""" if not prepared: value = self.get_prep_lookup(lookup_type, value) if hasattr(value, 'get_compiler'): value = value.get_compiler(connection=connection) if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'): # If the value has a relabel_aliases method, it will need to # be invoked before the final SQL is evaluated if hasattr(value, 'relabel_aliases'): return value if hasattr(value, 'as_sql'): sql, params = value.as_sql() else: sql, params = value._as_sql(connection=connection) return QueryWrapper(('(%s)' % sql), params) if lookup_type in ('regex', 'iregex', 'month', 'day', 'week_day', 'search'): return [value] elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'): return [self.get_db_prep_value(value, connection=connection, prepared=prepared)] elif lookup_type in ('range', 'in'): return [self.get_db_prep_value(v, connection=connection, prepared=prepared) for v in value] elif lookup_type in ('contains', 'icontains'): return ["%%%s%%" % connection.ops.prep_for_like_query(value)] elif lookup_type == 'iexact': return [connection.ops.prep_for_iexact_query(value)] elif lookup_type in ('startswith', 'istartswith'): return ["%s%%" % connection.ops.prep_for_like_query(value)] elif lookup_type in ('endswith', 'iendswith'): return ["%%%s" % connection.ops.prep_for_like_query(value)] elif lookup_type == 'isnull': return [] elif lookup_type == 'year': if self.get_internal_type() == 'DateField': return connection.ops.year_lookup_bounds_for_date_field(value) else: return connection.ops.year_lookup_bounds(value) def has_default(self): """ Returns a boolean of whether this field has a default value. """ return self.default is not NOT_PROVIDED def get_default(self): """ Returns the default value for this field. 
""" if self.has_default(): if callable(self.default): return self.default() return force_unicode(self.default, strings_only=True) if (not self.empty_strings_allowed or (self.null and not connection.features.interprets_empty_strings_as_nulls)): return None return "" def get_validator_unique_lookup_type(self): return '%s__exact' % self.name def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH): """Returns choices with a default blank choices included, for use as SelectField choices for this field.""" first_choice = include_blank and blank_choice or [] if self.choices: return first_choice + list(self.choices) rel_model = self.rel.to if hasattr(self.rel, 'get_related_field'): lst = [(getattr(x, self.rel.get_related_field().attname), smart_unicode(x)) for x in rel_model._default_manager.complex_filter( self.rel.limit_choices_to)] else: lst = [(x._get_pk_val(), smart_unicode(x)) for x in rel_model._default_manager.complex_filter( self.rel.limit_choices_to)] return first_choice + lst def get_choices_default(self): return self.get_choices() def get_flatchoices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH): """ Returns flattened choices with a default blank choice included. """ first_choice = include_blank and blank_choice or [] return first_choice + list(self.flatchoices) def _get_val_from_obj(self, obj): if obj is not None: return getattr(obj, self.attname) else: return self.get_default() def value_to_string(self, obj): """ Returns a string value of this field from the passed obj. This is used by the serialization framework. 
""" return smart_unicode(self._get_val_from_obj(obj)) def bind(self, fieldmapping, original, bound_field_class): return bound_field_class(self, fieldmapping, original) def _get_choices(self): if hasattr(self._choices, 'next'): choices, self._choices = tee(self._choices) return choices else: return self._choices choices = property(_get_choices) def _get_flatchoices(self): """Flattened version of choices tuple.""" flat = [] for choice, value in self.choices: if isinstance(value, (list, tuple)): flat.extend(value) else: flat.append((choice,value)) return flat flatchoices = property(_get_flatchoices) def save_form_data(self, instance, data): setattr(instance, self.name, data) def formfield(self, form_class=forms.CharField, **kwargs): """ Returns a django.forms.Field instance for this database Field. """ defaults = {'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text} if self.has_default(): if callable(self.default): defaults['initial'] = self.default defaults['show_hidden_initial'] = True else: defaults['initial'] = self.get_default() if self.choices: # Fields with choices get special treatment. include_blank = (self.blank or not (self.has_default() or 'initial' in kwargs)) defaults['choices'] = self.get_choices(include_blank=include_blank) defaults['coerce'] = self.to_python if self.null: defaults['empty_value'] = None form_class = forms.TypedChoiceField # Many of the subclass-specific formfield arguments (min_value, # max_value) don't apply for choice fields, so be sure to only pass # the values that TypedChoiceField will understand. for k in kwargs.keys(): if k not in ('coerce', 'empty_value', 'choices', 'required', 'widget', 'label', 'initial', 'help_text', 'error_messages', 'show_hidden_initial'): del kwargs[k] defaults.update(kwargs) return form_class(**defaults) def value_from_object(self, obj): """ Returns the value of this field in the given model instance. 
""" return getattr(obj, self.attname) def __repr__(self): """ Displays the module, class and name of the field. """ path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__) name = getattr(self, 'name', None) if name is not None: return '<%s: %s>' % (path, name) return '<%s>' % path class AutoField(Field): description = _("Automatic key") empty_strings_allowed = False def __init__(self, *args, **kwargs): assert kwargs.get('primary_key', False) is True, \ "%ss must have primary_key=True." % self.__class__.__name__ kwargs['blank'] = True Field.__init__(self, *args, **kwargs) def get_internal_type(self): return "AutoField" def validate(self, value, model_instance): pass def get_db_prep_value(self, value, connection, prepared=False): if value is None: return value return connection.ops.value_to_db_auto(value) def contribute_to_class(self, cls, name): assert not cls._meta.has_auto_field, \ "A model can't have more than one AutoField." super(AutoField, self).contribute_to_class(cls, name) cls._meta.has_auto_field = True cls._meta.auto_field = self def formfield(self, **kwargs): return None class BooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _(u"'%s' value must be either True or False."), } description = _("Boolean (Either True or False)") def __init__(self, *args, **kwargs): kwargs['blank'] = True if 'default' not in kwargs and not kwargs.get('null'): kwargs['default'] = False Field.__init__(self, *args, **kwargs) def get_internal_type(self): return "BooleanField" def to_python(self, value): if value in (True, False): # if value is 1 or 0 than it's equal to True or False, but we want # to return a true bool for semantic reasons. 
return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False msg = self.error_messages['invalid'] % str(value) raise exceptions.ValidationError(msg) def get_prep_lookup(self, lookup_type, value): # Special-case handling for filters coming from a Web request (e.g. the # admin interface). Only works for scalar values (not lists). If you're # passing in a list, you might as well make things the right type when # constructing the list. if value in ('1', '0'): value = bool(int(value)) return super(BooleanField, self).get_prep_lookup(lookup_type, value) def get_prep_value(self, value): if value is None: return None return bool(value) def formfield(self, **kwargs): # Unlike most fields, BooleanField figures out include_blank from # self.null instead of self.blank. if self.choices: include_blank = (self.null or not (self.has_default() or 'initial' in kwargs)) defaults = {'choices': self.get_choices( include_blank=include_blank)} else: defaults = {'form_class': forms.BooleanField} defaults.update(kwargs) return super(BooleanField, self).formfield(**defaults) class CharField(Field): description = _("String (up to %(max_length)s)") def __init__(self, *args, **kwargs): super(CharField, self).__init__(*args, **kwargs) self.validators.append(validators.MaxLengthValidator(self.max_length)) def get_internal_type(self): return "CharField" def to_python(self, value): if isinstance(value, basestring) or value is None: return value return smart_unicode(value) def get_prep_value(self, value): return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). defaults = {'max_length': self.max_length} defaults.update(kwargs) return super(CharField, self).formfield(**defaults) # TODO: Maybe move this into contrib, because it's specialized. 
class CommaSeparatedIntegerField(CharField): default_validators = [validators.validate_comma_separated_integer_list] description = _("Comma-separated integers") def formfield(self, **kwargs): defaults = { 'error_messages': { 'invalid': _(u'Enter only digits separated by commas.'), } } defaults.update(kwargs) return super(CommaSeparatedIntegerField, self).formfield(**defaults) class DateField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _(u"'%s' value has an invalid date format. It must be " u"in YYYY-MM-DD format."), 'invalid_date': _(u"'%s' value has the correct format (YYYY-MM-DD) " u"but it is an invalid date."), } description = _("Date (without time)") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True Field.__init__(self, verbose_name, name, **kwargs) def get_internal_type(self): return "DateField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): return value.date() if isinstance(value, datetime.date): return value value = smart_str(value) try: parsed = parse_date(value) if parsed is not None: return parsed except ValueError: msg = self.error_messages['invalid_date'] % value raise exceptions.ValidationError(msg) msg = self.error_messages['invalid'] % value raise exceptions.ValidationError(msg) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.date.today() setattr(model_instance, self.attname, value) return value else: return super(DateField, self).pre_save(model_instance, add) def contribute_to_class(self, cls, name): super(DateField,self).contribute_to_class(cls, name) if not self.null: setattr(cls, 'get_next_by_%s' % self.name, curry(cls._get_next_or_previous_by_FIELD, field=self, is_next=True)) setattr(cls, 'get_previous_by_%s' % self.name, 
curry(cls._get_next_or_previous_by_FIELD, field=self, is_next=False)) def get_prep_lookup(self, lookup_type, value): # For "__month", "__day", and "__week_day" lookups, convert the value # to an int so the database backend always sees a consistent type. if lookup_type in ('month', 'day', 'week_day'): return int(value) return super(DateField, self).get_prep_lookup(lookup_type, value) def get_prep_value(self, value): return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts dates into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.value_to_db_date(value) def value_to_string(self, obj): val = self._get_val_from_obj(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): defaults = {'form_class': forms.DateField} defaults.update(kwargs) return super(DateField, self).formfield(**defaults) class DateTimeField(DateField): empty_strings_allowed = False default_error_messages = { 'invalid': _(u"'%s' value has an invalid format. It must be in " u"YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."), 'invalid_date': _(u"'%s' value has the correct format " u"(YYYY-MM-DD) but it is an invalid date."), 'invalid_datetime': _(u"'%s' value has the correct format " u"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) " u"but it is an invalid date/time."), } description = _("Date (with time)") # __init__ is inherited from DateField def get_internal_type(self): return "DateTimeField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): value = datetime.datetime(value.year, value.month, value.day) if settings.USE_TZ: # For backwards compatibility, interpret naive datetimes in # local time. This won't work during DST change, but we can't # do much about it, so we let the exceptions percolate up the # call stack. 
warnings.warn(u"DateTimeField received a naive datetime (%s)" u" while time zone support is active." % value, RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value value = smart_str(value) try: parsed = parse_datetime(value) if parsed is not None: return parsed except ValueError: msg = self.error_messages['invalid_datetime'] % value raise exceptions.ValidationError(msg) try: parsed = parse_date(value) if parsed is not None: return datetime.datetime(parsed.year, parsed.month, parsed.day) except ValueError: msg = self.error_messages['invalid_date'] % value raise exceptions.ValidationError(msg) msg = self.error_messages['invalid'] % value raise exceptions.ValidationError(msg) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = timezone.now() setattr(model_instance, self.attname, value) return value else: return super(DateTimeField, self).pre_save(model_instance, add) # contribute_to_class is inherited from DateField, it registers # get_next_by_FOO and get_prev_by_FOO # get_prep_lookup is inherited from DateField def get_prep_value(self, value): value = self.to_python(value) if value is not None and settings.USE_TZ and timezone.is_naive(value): # For backwards compatibility, interpret naive datetimes in local # time. This won't work during DST change, but we can't do much # about it, so we let the exceptions percolate up the call stack. warnings.warn(u"DateTimeField received a naive datetime (%s)" u" while time zone support is active." 
% value, RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value def get_db_prep_value(self, value, connection, prepared=False): # Casts datetimes into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.value_to_db_datetime(value) def value_to_string(self, obj): val = self._get_val_from_obj(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): defaults = {'form_class': forms.DateTimeField} defaults.update(kwargs) return super(DateTimeField, self).formfield(**defaults) class DecimalField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _(u"'%s' value must be a decimal number."), } description = _("Decimal number") def __init__(self, verbose_name=None, name=None, max_digits=None, decimal_places=None, **kwargs): self.max_digits, self.decimal_places = max_digits, decimal_places Field.__init__(self, verbose_name, name, **kwargs) def get_internal_type(self): return "DecimalField" def to_python(self, value): if value is None: return value try: return decimal.Decimal(value) except decimal.InvalidOperation: msg = self.error_messages['invalid'] % str(value) raise exceptions.ValidationError(msg) def _format(self, value): if isinstance(value, basestring) or value is None: return value else: return self.format_number(value) def format_number(self, value): """ Formats a number into a string with the requisite number of digits and decimal places. """ # Method moved to django.db.backends.util. # # It is preserved because it is used by the oracle backend # (django.db.backends.oracle.query), and also for # backwards-compatibility with any external code which may have used # this method. 
from django.db.backends import util return util.format_number(value, self.max_digits, self.decimal_places) def get_db_prep_save(self, value, connection): return connection.ops.value_to_db_decimal(self.to_python(value), self.max_digits, self.decimal_places) def get_prep_value(self, value): return self.to_python(value) def formfield(self, **kwargs): defaults = { 'max_digits': self.max_digits, 'decimal_places': self.decimal_places, 'form_class': forms.DecimalField, } defaults.update(kwargs) return super(DecimalField, self).formfield(**defaults) class EmailField(CharField): default_validators = [validators.validate_email] description = _("E-mail address") def __init__(self, *args, **kwargs): kwargs['max_length'] = kwargs.get('max_length', 75) CharField.__init__(self, *args, **kwargs) def formfield(self, **kwargs): # As with CharField, this will cause email validation to be performed # twice. defaults = { 'form_class': forms.EmailField, } defaults.update(kwargs) return super(EmailField, self).formfield(**defaults) class FilePathField(Field): description = _("File path") def __init__(self, verbose_name=None, name=None, path='', match=None, recursive=False, **kwargs): self.path, self.match, self.recursive = path, match, recursive kwargs['max_length'] = kwargs.get('max_length', 100) Field.__init__(self, verbose_name, name, **kwargs) def get_prep_value(self, value): value = super(FilePathField, self).get_prep_value(value) if value is None: return None return smart_unicode(value) def formfield(self, **kwargs): defaults = { 'path': self.path, 'match': self.match, 'recursive': self.recursive, 'form_class': forms.FilePathField, } defaults.update(kwargs) return super(FilePathField, self).formfield(**defaults) def get_internal_type(self): return "FilePathField" class FloatField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%s' value must be a float."), } description = _("Floating point number") def get_prep_value(self, value): if value is None: 
return None return float(value) def get_internal_type(self): return "FloatField" def to_python(self, value): if value is None: return value try: return float(value) except (TypeError, ValueError): msg = self.error_messages['invalid'] % str(value) raise exceptions.ValidationError(msg) def formfield(self, **kwargs): defaults = {'form_class': forms.FloatField} defaults.update(kwargs) return super(FloatField, self).formfield(**defaults) class IntegerField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%s' value must be an integer."), } description = _("Integer") def get_prep_value(self, value): if value is None: return None return int(value) def get_prep_lookup(self, lookup_type, value): if ((lookup_type == 'gte' or lookup_type == 'lt') and isinstance(value, float)): value = math.ceil(value) return super(IntegerField, self).get_prep_lookup(lookup_type, value) def get_internal_type(self): return "IntegerField" def to_python(self, value): if value is None: return value try: return int(value) except (TypeError, ValueError): msg = self.error_messages['invalid'] % str(value) raise exceptions.ValidationError(msg) def formfield(self, **kwargs): defaults = {'form_class': forms.IntegerField} defaults.update(kwargs) return super(IntegerField, self).formfield(**defaults) class BigIntegerField(IntegerField): empty_strings_allowed = False description = _("Big (8 byte) integer") MAX_BIGINT = 9223372036854775807 def get_internal_type(self): return "BigIntegerField" def formfield(self, **kwargs): defaults = {'min_value': -BigIntegerField.MAX_BIGINT - 1, 'max_value': BigIntegerField.MAX_BIGINT} defaults.update(kwargs) return super(BigIntegerField, self).formfield(**defaults) class IPAddressField(Field): empty_strings_allowed = False description = _("IPv4 address") def __init__(self, *args, **kwargs): kwargs['max_length'] = 15 Field.__init__(self, *args, **kwargs) def get_prep_value(self, value): value = super(IPAddressField, self).get_prep_value(value) 
if value is None: return None return smart_unicode(value) def get_internal_type(self): return "IPAddressField" def formfield(self, **kwargs): defaults = {'form_class': forms.IPAddressField} defaults.update(kwargs) return super(IPAddressField, self).formfield(**defaults) class GenericIPAddressField(Field): empty_strings_allowed = True description = _("IP address") default_error_messages = {} def __init__(self, verbose_name=None, name=None, protocol='both', unpack_ipv4=False, *args, **kwargs): self.unpack_ipv4 = unpack_ipv4 self.default_validators, invalid_error_message = \ validators.ip_address_validators(protocol, unpack_ipv4) self.default_error_messages['invalid'] = invalid_error_message kwargs['max_length'] = 39 Field.__init__(self, verbose_name, name, *args, **kwargs) def get_internal_type(self): return "GenericIPAddressField" def to_python(self, value): if value and ':' in value: return clean_ipv6_address(value, self.unpack_ipv4, self.error_messages['invalid']) return value def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) return value or None def get_prep_value(self, value): if value is None: return value if value and ':' in value: try: return clean_ipv6_address(value, self.unpack_ipv4) except exceptions.ValidationError: pass return smart_unicode(value) def formfield(self, **kwargs): defaults = {'form_class': forms.GenericIPAddressField} defaults.update(kwargs) return super(GenericIPAddressField, self).formfield(**defaults) class NullBooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _("'%s' value must be either None, True or False."), } description = _("Boolean (Either True, False or None)") def __init__(self, *args, **kwargs): kwargs['null'] = True kwargs['blank'] = True Field.__init__(self, *args, **kwargs) def get_internal_type(self): return "NullBooleanField" def to_python(self, value): if value is None: return None if value in (True, False): return 
bool(value) if value in ('None',): return None if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False msg = self.error_messages['invalid'] % str(value) raise exceptions.ValidationError(msg) def get_prep_lookup(self, lookup_type, value): # Special-case handling for filters coming from a Web request (e.g. the # admin interface). Only works for scalar values (not lists). If you're # passing in a list, you might as well make things the right type when # constructing the list. if value in ('1', '0'): value = bool(int(value)) return super(NullBooleanField, self).get_prep_lookup(lookup_type, value) def get_prep_value(self, value): if value is None: return None return bool(value) def formfield(self, **kwargs): defaults = { 'form_class': forms.NullBooleanField, 'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text} defaults.update(kwargs) return super(NullBooleanField, self).formfield(**defaults) class PositiveIntegerField(IntegerField): description = _("Positive integer") def get_internal_type(self): return "PositiveIntegerField" def formfield(self, **kwargs): defaults = {'min_value': 0} defaults.update(kwargs) return super(PositiveIntegerField, self).formfield(**defaults) class PositiveSmallIntegerField(IntegerField): description = _("Positive small integer") def get_internal_type(self): return "PositiveSmallIntegerField" def formfield(self, **kwargs): defaults = {'min_value': 0} defaults.update(kwargs) return super(PositiveSmallIntegerField, self).formfield(**defaults) class SlugField(CharField): description = _("Slug (up to %(max_length)s)") def __init__(self, *args, **kwargs): kwargs['max_length'] = kwargs.get('max_length', 50) # Set db_index=True unless it's been set manually. 
if 'db_index' not in kwargs: kwargs['db_index'] = True super(SlugField, self).__init__(*args, **kwargs) def get_internal_type(self): return "SlugField" def formfield(self, **kwargs): defaults = {'form_class': forms.SlugField} defaults.update(kwargs) return super(SlugField, self).formfield(**defaults) class SmallIntegerField(IntegerField): description = _("Small integer") def get_internal_type(self): return "SmallIntegerField" class TextField(Field): description = _("Text") def get_internal_type(self): return "TextField" def get_prep_value(self, value): if isinstance(value, basestring) or value is None: return value return smart_unicode(value) def formfield(self, **kwargs): defaults = {'widget': forms.Textarea} defaults.update(kwargs) return super(TextField, self).formfield(**defaults) class TimeField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _(u"'%s' value has an invalid format. It must be in " u"HH:MM[:ss[.uuuuuu]] format."), 'invalid_time': _(u"'%s' value has the correct format " u"(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."), } description = _("Time") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True Field.__init__(self, verbose_name, name, **kwargs) def get_internal_type(self): return "TimeField" def to_python(self, value): if value is None: return None if isinstance(value, datetime.time): return value if isinstance(value, datetime.datetime): # Not usually a good idea to pass in a datetime here (it loses # information), but this can be a side-effect of interacting with a # database backend (e.g. Oracle), so we'll be accommodating. 
return value.time() value = smart_str(value) try: parsed = parse_time(value) if parsed is not None: return parsed except ValueError: msg = self.error_messages['invalid_time'] % value raise exceptions.ValidationError(msg) msg = self.error_messages['invalid'] % value raise exceptions.ValidationError(msg) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.datetime.now().time() setattr(model_instance, self.attname, value) return value else: return super(TimeField, self).pre_save(model_instance, add) def get_prep_value(self, value): return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts times into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.value_to_db_time(value) def value_to_string(self, obj): val = self._get_val_from_obj(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): defaults = {'form_class': forms.TimeField} defaults.update(kwargs) return super(TimeField, self).formfield(**defaults) class URLField(CharField): description = _("URL") def __init__(self, verbose_name=None, name=None, verify_exists=False, **kwargs): kwargs['max_length'] = kwargs.get('max_length', 200) CharField.__init__(self, verbose_name, name, **kwargs) self.validators.append( validators.URLValidator(verify_exists=verify_exists)) def formfield(self, **kwargs): # As with CharField, this will cause URL validation to be performed # twice. defaults = { 'form_class': forms.URLField, } defaults.update(kwargs) return super(URLField, self).formfield(**defaults)
klnprj/testapp
django/db/models/fields/__init__.py
Python
bsd-3-clause
47,219
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CONTENT_CHILD_REQUEST_EXTRA_DATA_H_ #define CONTENT_CHILD_REQUEST_EXTRA_DATA_H_ #include "base/compiler_specific.h" #include "content/common/content_export.h" #include "content/public/common/page_transition_types.h" #include "third_party/WebKit/public/platform/WebString.h" #include "third_party/WebKit/public/platform/WebURLRequest.h" #include "third_party/WebKit/public/web/WebPageVisibilityState.h" namespace content { // Can be used by callers to store extra data on every ResourceRequest // which will be incorporated into the ResourceHostMsg_Request message // sent by ResourceDispatcher. class CONTENT_EXPORT RequestExtraData : public NON_EXPORTED_BASE(blink::WebURLRequest::ExtraData) { public: RequestExtraData(); virtual ~RequestExtraData(); blink::WebPageVisibilityState visibility_state() const { return visibility_state_; } void set_visibility_state(blink::WebPageVisibilityState visibility_state) { visibility_state_ = visibility_state; } int render_frame_id() const { return render_frame_id_; } void set_render_frame_id(int render_frame_id) { render_frame_id_ = render_frame_id; } bool is_main_frame() const { return is_main_frame_; } void set_is_main_frame(bool is_main_frame) { is_main_frame_ = is_main_frame; } GURL frame_origin() const { return frame_origin_; } void set_frame_origin(const GURL& frame_origin) { frame_origin_ = frame_origin; } bool parent_is_main_frame() const { return parent_is_main_frame_; } void set_parent_is_main_frame(bool parent_is_main_frame) { parent_is_main_frame_ = parent_is_main_frame; } int parent_render_frame_id() const { return parent_render_frame_id_; } void set_parent_render_frame_id(int parent_render_frame_id) { parent_render_frame_id_ = parent_render_frame_id; } bool allow_download() const { return allow_download_; } void set_allow_download(bool 
allow_download) { allow_download_ = allow_download; } PageTransition transition_type() const { return transition_type_; } void set_transition_type(PageTransition transition_type) { transition_type_ = transition_type; } bool should_replace_current_entry() const { return should_replace_current_entry_; } void set_should_replace_current_entry( bool should_replace_current_entry) { should_replace_current_entry_ = should_replace_current_entry; } int transferred_request_child_id() const { return transferred_request_child_id_; } void set_transferred_request_child_id( int transferred_request_child_id) { transferred_request_child_id_ = transferred_request_child_id; } int transferred_request_request_id() const { return transferred_request_request_id_; } void set_transferred_request_request_id( int transferred_request_request_id) { transferred_request_request_id_ = transferred_request_request_id; } int service_worker_provider_id() const { return service_worker_provider_id_; } void set_service_worker_provider_id( int service_worker_provider_id) { service_worker_provider_id_ = service_worker_provider_id; } // |custom_user_agent| is used to communicate an overriding custom user agent // to |RenderViewImpl::willSendRequest()|; set to a null string to indicate no // override and an empty string to indicate that there should be no user // agent. 
const blink::WebString& custom_user_agent() const { return custom_user_agent_; } void set_custom_user_agent( const blink::WebString& custom_user_agent) { custom_user_agent_ = custom_user_agent; } private: blink::WebPageVisibilityState visibility_state_; int render_frame_id_; bool is_main_frame_; GURL frame_origin_; bool parent_is_main_frame_; int parent_render_frame_id_; bool allow_download_; PageTransition transition_type_; bool should_replace_current_entry_; int transferred_request_child_id_; int transferred_request_request_id_; int service_worker_provider_id_; blink::WebString custom_user_agent_; DISALLOW_COPY_AND_ASSIGN(RequestExtraData); }; } // namespace content #endif // CONTENT_CHILD_REQUEST_EXTRA_DATA_H_
ondra-novak/chromium.src
content/child/request_extra_data.h
C
bsd-3-clause
4,312
<?php # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/dialogflow/v2/session.proto namespace Google\Cloud\Dialogflow\V2; use Google\Protobuf\Internal\GPBType; use Google\Protobuf\Internal\RepeatedField; use Google\Protobuf\Internal\GPBUtil; /** * Represents the result of conversational query or event processing. * * Generated from protobuf message <code>google.cloud.dialogflow.v2.QueryResult</code> */ class QueryResult extends \Google\Protobuf\Internal\Message { /** * The original conversational query text: * - If natural language text was provided as input, `query_text` contains * a copy of the input. * - If natural language speech audio was provided as input, `query_text` * contains the speech recognition result. If speech recognizer produced * multiple alternatives, a particular one is picked. * - If an event was provided as input, `query_text` is not set. * * Generated from protobuf field <code>string query_text = 1;</code> */ private $query_text = ''; /** * The language that was triggered during intent detection. * See [Language Support](https://dialogflow.com/docs/reference/language) * for a list of the currently supported language codes. * * Generated from protobuf field <code>string language_code = 15;</code> */ private $language_code = ''; /** * The Speech recognition confidence between 0.0 and 1.0. A higher number * indicates an estimated greater likelihood that the recognized words are * correct. The default of 0.0 is a sentinel value indicating that confidence * was not set. * You should not rely on this field as it isn't guaranteed to be accurate, or * even set. In particular this field isn't set in Webhook calls and for * StreamingDetectIntent since the streaming endpoint has separate confidence * estimates per portion of the audio in StreamingRecognitionResult. 
* * Generated from protobuf field <code>float speech_recognition_confidence = 2;</code> */ private $speech_recognition_confidence = 0.0; /** * The action name from the matched intent. * * Generated from protobuf field <code>string action = 3;</code> */ private $action = ''; /** * The collection of extracted parameters. * * Generated from protobuf field <code>.google.protobuf.Struct parameters = 4;</code> */ private $parameters = null; /** * This field is set to: * - `false` if the matched intent has required parameters and not all of * the required parameter values have been collected. * - `true` if all required parameter values have been collected, or if the * matched intent doesn't contain any required parameters. * * Generated from protobuf field <code>bool all_required_params_present = 5;</code> */ private $all_required_params_present = false; /** * The text to be pronounced to the user or shown on the screen. * * Generated from protobuf field <code>string fulfillment_text = 6;</code> */ private $fulfillment_text = ''; /** * The collection of rich messages to present to the user. * * Generated from protobuf field <code>repeated .google.cloud.dialogflow.v2.Intent.Message fulfillment_messages = 7;</code> */ private $fulfillment_messages; /** * If the query was fulfilled by a webhook call, this field is set to the * value of the `source` field returned in the webhook response. * * Generated from protobuf field <code>string webhook_source = 8;</code> */ private $webhook_source = ''; /** * If the query was fulfilled by a webhook call, this field is set to the * value of the `payload` field returned in the webhook response. * * Generated from protobuf field <code>.google.protobuf.Struct webhook_payload = 9;</code> */ private $webhook_payload = null; /** * The collection of output contexts. If applicable, * `output_contexts.parameters` contains entries with name * `<parameter name>.original` containing the original parameter values * before the query. 
* * Generated from protobuf field <code>repeated .google.cloud.dialogflow.v2.Context output_contexts = 10;</code> */ private $output_contexts; /** * The intent that matched the conversational query. Some, not * all fields are filled in this message, including but not limited to: * `name`, `display_name` and `webhook_state`. * * Generated from protobuf field <code>.google.cloud.dialogflow.v2.Intent intent = 11;</code> */ private $intent = null; /** * The intent detection confidence. Values range from 0.0 * (completely uncertain) to 1.0 (completely certain). * * Generated from protobuf field <code>float intent_detection_confidence = 12;</code> */ private $intent_detection_confidence = 0.0; /** * The free-form diagnostic info. For example, this field * could contain webhook call latency. * * Generated from protobuf field <code>.google.protobuf.Struct diagnostic_info = 14;</code> */ private $diagnostic_info = null; public function __construct() { \GPBMetadata\Google\Cloud\Dialogflow\V2\Session::initOnce(); parent::__construct(); } /** * The original conversational query text: * - If natural language text was provided as input, `query_text` contains * a copy of the input. * - If natural language speech audio was provided as input, `query_text` * contains the speech recognition result. If speech recognizer produced * multiple alternatives, a particular one is picked. * - If an event was provided as input, `query_text` is not set. * * Generated from protobuf field <code>string query_text = 1;</code> * @return string */ public function getQueryText() { return $this->query_text; } /** * The original conversational query text: * - If natural language text was provided as input, `query_text` contains * a copy of the input. * - If natural language speech audio was provided as input, `query_text` * contains the speech recognition result. If speech recognizer produced * multiple alternatives, a particular one is picked. 
* - If an event was provided as input, `query_text` is not set. * * Generated from protobuf field <code>string query_text = 1;</code> * @param string $var * @return $this */ public function setQueryText($var) { GPBUtil::checkString($var, True); $this->query_text = $var; return $this; } /** * The language that was triggered during intent detection. * See [Language Support](https://dialogflow.com/docs/reference/language) * for a list of the currently supported language codes. * * Generated from protobuf field <code>string language_code = 15;</code> * @return string */ public function getLanguageCode() { return $this->language_code; } /** * The language that was triggered during intent detection. * See [Language Support](https://dialogflow.com/docs/reference/language) * for a list of the currently supported language codes. * * Generated from protobuf field <code>string language_code = 15;</code> * @param string $var * @return $this */ public function setLanguageCode($var) { GPBUtil::checkString($var, True); $this->language_code = $var; return $this; } /** * The Speech recognition confidence between 0.0 and 1.0. A higher number * indicates an estimated greater likelihood that the recognized words are * correct. The default of 0.0 is a sentinel value indicating that confidence * was not set. * You should not rely on this field as it isn't guaranteed to be accurate, or * even set. In particular this field isn't set in Webhook calls and for * StreamingDetectIntent since the streaming endpoint has separate confidence * estimates per portion of the audio in StreamingRecognitionResult. * * Generated from protobuf field <code>float speech_recognition_confidence = 2;</code> * @return float */ public function getSpeechRecognitionConfidence() { return $this->speech_recognition_confidence; } /** * The Speech recognition confidence between 0.0 and 1.0. A higher number * indicates an estimated greater likelihood that the recognized words are * correct. 
The default of 0.0 is a sentinel value indicating that confidence * was not set. * You should not rely on this field as it isn't guaranteed to be accurate, or * even set. In particular this field isn't set in Webhook calls and for * StreamingDetectIntent since the streaming endpoint has separate confidence * estimates per portion of the audio in StreamingRecognitionResult. * * Generated from protobuf field <code>float speech_recognition_confidence = 2;</code> * @param float $var * @return $this */ public function setSpeechRecognitionConfidence($var) { GPBUtil::checkFloat($var); $this->speech_recognition_confidence = $var; return $this; } /** * The action name from the matched intent. * * Generated from protobuf field <code>string action = 3;</code> * @return string */ public function getAction() { return $this->action; } /** * The action name from the matched intent. * * Generated from protobuf field <code>string action = 3;</code> * @param string $var * @return $this */ public function setAction($var) { GPBUtil::checkString($var, True); $this->action = $var; return $this; } /** * The collection of extracted parameters. * * Generated from protobuf field <code>.google.protobuf.Struct parameters = 4;</code> * @return \Google\Protobuf\Struct */ public function getParameters() { return $this->parameters; } /** * The collection of extracted parameters. * * Generated from protobuf field <code>.google.protobuf.Struct parameters = 4;</code> * @param \Google\Protobuf\Struct $var * @return $this */ public function setParameters($var) { GPBUtil::checkMessage($var, \Google\Protobuf\Struct::class); $this->parameters = $var; return $this; } /** * This field is set to: * - `false` if the matched intent has required parameters and not all of * the required parameter values have been collected. * - `true` if all required parameter values have been collected, or if the * matched intent doesn't contain any required parameters. 
* * Generated from protobuf field <code>bool all_required_params_present = 5;</code> * @return bool */ public function getAllRequiredParamsPresent() { return $this->all_required_params_present; } /** * This field is set to: * - `false` if the matched intent has required parameters and not all of * the required parameter values have been collected. * - `true` if all required parameter values have been collected, or if the * matched intent doesn't contain any required parameters. * * Generated from protobuf field <code>bool all_required_params_present = 5;</code> * @param bool $var * @return $this */ public function setAllRequiredParamsPresent($var) { GPBUtil::checkBool($var); $this->all_required_params_present = $var; return $this; } /** * The text to be pronounced to the user or shown on the screen. * * Generated from protobuf field <code>string fulfillment_text = 6;</code> * @return string */ public function getFulfillmentText() { return $this->fulfillment_text; } /** * The text to be pronounced to the user or shown on the screen. * * Generated from protobuf field <code>string fulfillment_text = 6;</code> * @param string $var * @return $this */ public function setFulfillmentText($var) { GPBUtil::checkString($var, True); $this->fulfillment_text = $var; return $this; } /** * The collection of rich messages to present to the user. * * Generated from protobuf field <code>repeated .google.cloud.dialogflow.v2.Intent.Message fulfillment_messages = 7;</code> * @return \Google\Protobuf\Internal\RepeatedField */ public function getFulfillmentMessages() { return $this->fulfillment_messages; } /** * The collection of rich messages to present to the user. 
* * Generated from protobuf field <code>repeated .google.cloud.dialogflow.v2.Intent.Message fulfillment_messages = 7;</code> * @param \Google\Cloud\Dialogflow\V2\Intent_Message[]|\Google\Protobuf\Internal\RepeatedField $var * @return $this */ public function setFulfillmentMessages($var) { $arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::MESSAGE, \Google\Cloud\Dialogflow\V2\Intent_Message::class); $this->fulfillment_messages = $arr; return $this; } /** * If the query was fulfilled by a webhook call, this field is set to the * value of the `source` field returned in the webhook response. * * Generated from protobuf field <code>string webhook_source = 8;</code> * @return string */ public function getWebhookSource() { return $this->webhook_source; } /** * If the query was fulfilled by a webhook call, this field is set to the * value of the `source` field returned in the webhook response. * * Generated from protobuf field <code>string webhook_source = 8;</code> * @param string $var * @return $this */ public function setWebhookSource($var) { GPBUtil::checkString($var, True); $this->webhook_source = $var; return $this; } /** * If the query was fulfilled by a webhook call, this field is set to the * value of the `payload` field returned in the webhook response. * * Generated from protobuf field <code>.google.protobuf.Struct webhook_payload = 9;</code> * @return \Google\Protobuf\Struct */ public function getWebhookPayload() { return $this->webhook_payload; } /** * If the query was fulfilled by a webhook call, this field is set to the * value of the `payload` field returned in the webhook response. * * Generated from protobuf field <code>.google.protobuf.Struct webhook_payload = 9;</code> * @param \Google\Protobuf\Struct $var * @return $this */ public function setWebhookPayload($var) { GPBUtil::checkMessage($var, \Google\Protobuf\Struct::class); $this->webhook_payload = $var; return $this; } /** * The collection of output contexts. 
If applicable, * `output_contexts.parameters` contains entries with name * `<parameter name>.original` containing the original parameter values * before the query. * * Generated from protobuf field <code>repeated .google.cloud.dialogflow.v2.Context output_contexts = 10;</code> * @return \Google\Protobuf\Internal\RepeatedField */ public function getOutputContexts() { return $this->output_contexts; } /** * The collection of output contexts. If applicable, * `output_contexts.parameters` contains entries with name * `<parameter name>.original` containing the original parameter values * before the query. * * Generated from protobuf field <code>repeated .google.cloud.dialogflow.v2.Context output_contexts = 10;</code> * @param \Google\Cloud\Dialogflow\V2\Context[]|\Google\Protobuf\Internal\RepeatedField $var * @return $this */ public function setOutputContexts($var) { $arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::MESSAGE, \Google\Cloud\Dialogflow\V2\Context::class); $this->output_contexts = $arr; return $this; } /** * The intent that matched the conversational query. Some, not * all fields are filled in this message, including but not limited to: * `name`, `display_name` and `webhook_state`. * * Generated from protobuf field <code>.google.cloud.dialogflow.v2.Intent intent = 11;</code> * @return \Google\Cloud\Dialogflow\V2\Intent */ public function getIntent() { return $this->intent; } /** * The intent that matched the conversational query. Some, not * all fields are filled in this message, including but not limited to: * `name`, `display_name` and `webhook_state`. * * Generated from protobuf field <code>.google.cloud.dialogflow.v2.Intent intent = 11;</code> * @param \Google\Cloud\Dialogflow\V2\Intent $var * @return $this */ public function setIntent($var) { GPBUtil::checkMessage($var, \Google\Cloud\Dialogflow\V2\Intent::class); $this->intent = $var; return $this; } /** * The intent detection confidence. 
Values range from 0.0 * (completely uncertain) to 1.0 (completely certain). * * Generated from protobuf field <code>float intent_detection_confidence = 12;</code> * @return float */ public function getIntentDetectionConfidence() { return $this->intent_detection_confidence; } /** * The intent detection confidence. Values range from 0.0 * (completely uncertain) to 1.0 (completely certain). * * Generated from protobuf field <code>float intent_detection_confidence = 12;</code> * @param float $var * @return $this */ public function setIntentDetectionConfidence($var) { GPBUtil::checkFloat($var); $this->intent_detection_confidence = $var; return $this; } /** * The free-form diagnostic info. For example, this field * could contain webhook call latency. * * Generated from protobuf field <code>.google.protobuf.Struct diagnostic_info = 14;</code> * @return \Google\Protobuf\Struct */ public function getDiagnosticInfo() { return $this->diagnostic_info; } /** * The free-form diagnostic info. For example, this field * could contain webhook call latency. * * Generated from protobuf field <code>.google.protobuf.Struct diagnostic_info = 14;</code> * @param \Google\Protobuf\Struct $var * @return $this */ public function setDiagnosticInfo($var) { GPBUtil::checkMessage($var, \Google\Protobuf\Struct::class); $this->diagnostic_info = $var; return $this; } }
googleapis/proto-client-php
src/Google/Cloud/Dialogflow/V2/QueryResult.php
PHP
bsd-3-clause
19,237
package main import ( "fmt" "github.com/flynn/go-docopt" ) func init() { register("install", runInstaller, `usage: flynn install`) } func runInstaller(args *docopt.Args) error { fmt.Printf("DEPRECATED: `flynn install` has been deprecated.\nRefer to https://flynn.io/docs/installation for current installation instructions.\nAn unsupported and unmaintained snapshot of the installer binaries at the time of deprecation is available at https://dl.flynn.io/flynn-install-deprecated.tar.gz\n") return nil }
flynn/flynn
cli/install.go
GO
bsd-3-clause
512
// according to the offical reference (EN) // https://ichigojam.net/IchigoJam-en.html Prism.languages.ichigojam = { 'comment': /(?:\B'|REM)(?:[^\n\r]*)/i, 'string': { pattern: /"(?:""|[!#$%&'()*,\/:;<=>?^\w +\-.])*"/, greedy: true }, 'number': /\B#[0-9A-F]+|\B`[01]+|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:E[+-]?\d+)?/i, 'keyword': /\b(?:BEEP|BPS|CASE|CLEAR|CLK|CLO|CLP|CLS|CLT|CLV|CONT|COPY|ELSE|END|FILE|FILES|FOR|GOSUB|GOTO|GSB|IF|INPUT|KBD|LED|LET|LIST|LOAD|LOCATE|LRUN|NEW|NEXT|OUT|PLAY|POKE|PRINT|PWM|REM|RENUM|RESET|RETURN|RIGHT|RTN|RUN|SAVE|SCROLL|SLEEP|SRND|STEP|STOP|SUB|TEMPO|THEN|TO|UART|VIDEO|WAIT)(?:\$|\b)/i, 'function': /\b(?:ABS|ANA|ASC|BIN|BTN|DEC|END|FREE|HELP|HEX|I2CR|I2CW|IN|INKEY|LEN|LINE|PEEK|RND|SCR|SOUND|STR|TICK|USR|VER|VPEEK|ZER)(?:\$|\b)/i, 'label': /(?:\B@\S+)/, 'operator': /<[=>]?|>=?|\|\||&&|[+\-*\/=|&^~!]|\b(?:AND|NOT|OR)\b/i, 'punctuation': /[\[,;:()\]]/ };
netbek/chrys
demo/vendor/prismjs/components/prism-ichigojam.js
JavaScript
bsd-3-clause
899
// Styled checkbox/radio helper plus the "#web" / "#facebook" channel toggles.
// Depends on jQuery and, for the facebook branch, the Facebook JS SDK (FB).
(function($){
	$(function(){
		var base_url = window.location.origin;
		var d = document;
		// BUG FIX: the original called indexOf() with no argument, which
		// searches for the literal string "undefined" and so was (almost)
		// always false. The intended check is for "safari" in the UA.
		// NOTE(review): Chrome's UA also contains "safari", so this flag is
		// true on Chrome as well - confirm that is acceptable here.
		var safari = navigator.userAgent.toLowerCase().indexOf('safari') != -1;
		// Shorthand for getElementsByTagName on an arbitrary parent element.
		var gebtn = function(parEl, child) {
			return parEl.getElementsByTagName(child);
		};

		// On window load: tag <body> with "has-js" and wire up every
		// label.label_check / label.label_radio to the toggle handlers,
		// seeding the c_on/c_off (r_on/r_off) class from the input state.
		onload = function() {
			var body = gebtn(d, 'body')[0];
			body.className = body.className && body.className != '' ?
				body.className + ' has-js' : 'has-js';
			if (!d.getElementById || !d.createTextNode) return;
			var ls = gebtn(d, 'label');
			for (var i = 0; i < ls.length; i++) {
				var l = ls[i];
				if (l.className.indexOf('label_') == -1) continue;
				var inp = gebtn(l, 'input')[0];
				if (l.className == 'label_check') {
					l.className = (safari && inp.checked == true || inp.checked) ?
						'label_check c_on' : 'label_check c_off';
					l.onclick = check_it;
				}
				if (l.className == 'label_radio') {
					l.className = (safari && inp.checked == true || inp.checked) ?
						'label_radio r_on' : 'label_radio r_off';
					l.onclick = turn_radio;
				}
			}
		};

		// Toggle a styled checkbox label; on Safari the underlying input is
		// clicked explicitly so its checked state follows the visual one.
		var check_it = function() {
			var inp = gebtn(this, 'input')[0];
			if (this.className == 'label_check c_off' || (!safari && inp.checked)) {
				this.className = 'label_check c_on';
				if (safari) inp.click();
			} else {
				this.className = 'label_check c_off';
				if (safari) inp.click();
			}
		};

		// Select a styled radio label: clear r_on from all sibling radio
		// labels, then mark this one.
		var turn_radio = function() {
			var inp = gebtn(this, 'input')[0];
			if (this.className == 'label_radio r_off' || inp.checked) {
				var ls = gebtn(this.parentNode, 'label');
				for (var i = 0; i < ls.length; i++) {
					var l = ls[i];
					if (l.className.indexOf('label_radio') == -1) continue;
					l.className = 'label_radio r_off';
				}
				this.className = 'label_radio r_on';
				if (safari) inp.click();
			} else {
				this.className = 'label_radio r_off';
				if (safari) inp.click();
			}
		};

		// "#web" toggle: unchecking hides the web panel; checking shows it,
		// XOR-obfuscates the 6th path segment of the URL and posts it to
		// /frontend/savencontinue, then refreshes the UI from the response.
		// NOTE(review): .attr("checked", ...) does not reliably change the
		// live checked state on jQuery >= 1.6 - .prop() would; left as-is to
		// preserve current behavior, but verify against the jQuery version.
		$("#web").click(function(){
			if ($(this).children("input[type='checkbox']").is(":checked")) {
				$(this).children("input[type='checkbox']").attr("checked", false);
				$(this).removeClass("c_on");
				$("#webPart").hide();
			} else {
				$(this).children("input[type='checkbox']").attr("checked", true);
				$(this).addClass("c_on");
				$("#webPart").show();
				//$("#webUrl0").hide();
				// assumes the URL has at least 6 "/"-separated parts and that
				// tempName[5] is the template name - TODO confirm with callers.
				var tempName = location.href.split("/");
				var encoded = "";
				var i;
				for (i = 0; i < tempName[5].length; i++) {
					var a = tempName[5].charCodeAt(i);
					var b = a ^ 123; // bitwise XOR with any number, e.g. 123
					encoded = encoded + String.fromCharCode(b);
				}
				$.ajax({
					type : "POST",
					url : base_url + "/frontend/savencontinue",
					data : {'tName' : encoded},
					success : function(data) {
						// Response format: "<something>||<json array>".
						var hold = data.split("||");
						var jsObject = JSON.parse(hold[1]);
						if (jsObject != null) {
							for (i = 0; i < jsObject.length; i++) {
								if (jsObject[i].new_template_name != "") {
									$("#web").attr('class', 'span_check c_on');
									$("#web").children('input').attr('checked', true);
									$("#webPart").show();
									$("#nameCam").text(jsObject[i].new_template_name);
								}
							}
						}
					}
				});
			}
		});

		// "#facebook" toggle: unchecking hides the FB widgets; checking
		// initializes the FB SDK, and if the user is connected fills the
		// "#myItem" select with their pages (value = "<id>|--||--|<token>").
		$("#facebook").click(function(){
			if ($(this).children("input[type='checkbox']").is(":checked")) {
				$(this).children("input[type='checkbox']").attr("checked", false);
				$(this).removeClass("c_on");
				$("#noConn").hide();
				$("#connBtn").hide();
				$("#fbPages").hide();
			} else {
				$("#facebook").attr('class', 'span_check c_on');
				$("#facebook").children('input').attr('checked', true);
				FB.init({
					appId : '507520019301583',
					cookie : true, // enable cookies to allow the server to access
					// the session
					xfbml : true, // parse social plugins on this page
					version : 'v2.0' // use version 2.0
				});
				FB.getLoginStatus(function(response) {
					console.log(response);
					if (response.status === 'connected') {
						$("#noConn").hide();
						$("#connBtn").hide();
						FB.api('/me/accounts', function(responsePages) {
							//alert(JSON.stringify(responsePages));
							console.log(responsePages);
							$("#myItem option").remove();
							// NOTE(review): page1/pages below are implicit
							// globals (no var) - kept for behavior parity.
							page1 = '<option value="SELECT YOUR PAGE">SELECT YOUR PAGE</option>';
							$("#myItem").append(page1);
							for (var i = 0; i < responsePages.data.length; i++) {
								pages = '<option value="' + responsePages.data[i].id + '|--||--|' + responsePages.data[i].access_token + '">' + responsePages.data[i].name + '</option>';
								$("#myItem").append(pages);
							}
							$("#fbPages").show();
						});
					} else if (response.status === 'not_authorized') {
						alert("not authorized");
					} else {
						$("#noConn").show();
						$("#connBtn").show();
					}
				}, {perms: 'manage_pages'});
			}
		});
	});
})(jQuery)
Baishakhidelgence/testzend
public/js3/checkNradio.js
JavaScript
bsd-3-clause
7,017
#ifndef TAIJU_TRIE_CONVERTER_FACTORY_H #define TAIJU_TRIE_CONVERTER_FACTORY_H #include "trie-converter-base.h" namespace taiju { class TrieConverterFactory { public: static TrieConverterBase *Create(const BuilderConfig &config); static TrieConverterBase *Create(TrieType type); private: // Disallows instantiation. TrieConverterFactory(); ~TrieConverterFactory(); // Disallows copies. TrieConverterFactory(const TrieConverterFactory &); TrieConverterFactory &operator=(const TrieConverterFactory &); }; } // namespace taiju #endif // TAIJU_TRIE_CONVERTER_FACTORY_H
rockeet/taiju
include/taiju/trie-converter-factory.h
C
bsd-3-clause
582
from __future__ import print_function
import numpy as nm

try:
    import matplotlib.pyplot as plt
    import matplotlib as mpl

except (ImportError, RuntimeError):
    # Plotting is optional: without matplotlib only the text-mode helpers
    # (e.g. print_matrix_diff()) are usable; plt/mpl stay None.
    plt = mpl = None
    #print 'matplotlib import failed!'

from sfepy.base.base import output, pause

def spy(mtx, eps=None, color='b', **kwargs):
    """
    Show sparsity structure of a `scipy.sparse` matrix.

    Parameters
    ----------
    mtx : scipy.sparse matrix
        The matrix whose nonzero pattern is plotted.
    eps : float, optional
        If given, plot only entries with absolute value greater than `eps`;
        otherwise all stored entries are shown.
    color : str
        Matplotlib color of the markers.
    **kwargs : dict
        Extra keyword arguments passed to ``plt.plot()``; ``marker``
        defaults to ``'.'``.
    """
    aux = mtx.tocoo()
    # ij[:, 0] are row indices, ij[:, 1] are column indices; val the values.
    ij, val = nm.concatenate((aux.row[:,nm.newaxis],
                              aux.col[:,nm.newaxis]), 1), aux.data
    n_item = aux.getnnz()
    n_row, n_col = aux.shape

    if eps is not None:
        output('using eps =', eps)
        # Keep only the (row, col) pairs whose value exceeds the threshold.
        ij = nm.compress(nm.absolute(val) > eps, ij, 0)
        n_item = ij.shape[0]
    else:
        output('showing all')

    output('n_item:', n_item)
    if n_item:
        args = {'marker' : '.'}
        args.update(kwargs)
        # x = column index, y = row index, as in MATLAB's spy().
        plt.plot(ij[:,1], ij[:,0], color, linestyle='None', **args)
    plt.axis('image')
    # NOTE(review): limits use n_row for the x axis and n_col for y although
    # x plots column indices - verify for non-square matrices.
    plt.axis([-0.5, n_row+0.5, -0.5, n_col+0.5])
    plt.xlabel(r'%d x %d: %d nnz, %.2f%% fill'
               % (n_row, n_col, n_item, 100. * n_item /
                  (float(n_row) * float(n_col))))
    ax = plt.gca()
    # Flip the y axis so that row 0 is at the top.
    ax.set_ylim(ax.get_ylim()[::-1])

def spy_and_show(mtx, **kwargs):
    """Plot the sparsity structure of `mtx` via spy() and show the figure."""
    spy(mtx, **kwargs)
    plt.show()

##
# 13.12.2005, c
def print_matrix_diff(title, legend, mtx1, mtx2, mtx_da, mtx_dr, iis):
    """
    Print positions, values and absolute/relative differences of two sparse
    matrices for the stored entries selected by `iis`.

    Parameters
    ----------
    title : str
        Heading printed above the listing.
    legend : sequence of two str
        Names of the two compared matrices.
    mtx1, mtx2 : scipy.sparse matrices
        The compared matrices.
    mtx_da, mtx_dr : scipy.sparse matrices
        Absolute and relative difference matrices (same structure as `mtx1`).
    iis : array
        Indices into the stored data of `mtx_da` selecting entries to print.
    """
    print('%s: ir, ic, %s, %s, adiff, rdiff' % ((title,) + tuple(legend)))

    aux = mtx_da.copy().tocsc() # mtx_da should be CSC, cast for safety anyway.
    aux.data = nm.ones(mtx_da.data.shape[0])
    # NOTE(review): scipy's nonzero() returns (row, col) arrays; the names
    # here assume (col, row) order - verify ir/ic are not swapped below.
    ics, irs = aux.nonzero()
    for ii in iis:
        ir, ic = irs[ii], ics[ii]
        print('%5d %5d %11.4e %11.4e %9.2e %9.2e'
              % (ir, ic, mtx1[ir,ic], mtx2[ir,ic],
                 mtx_da[ir,ic], mtx_dr[ir,ic]))

    print('total: %d' % len(iis))

##
# 13.12.2005, c
# 14.12.2005
# 15.12.2005
# 18.07.2007
def plot_matrix_diff(mtx1, mtx2, delta, legend, mode):
    """
    Report and (for `mode` >= 2) plot absolute and relative differences of
    two sparse matrices with identical sparsity structure.

    Listings of entries exceeding a threshold `epsilon` (derived from
    `delta`) are printed in several passes, pausing after each; with
    matplotlib available and `mode` >= 2 a six-panel figure is shown.

    Parameters
    ----------
    mtx1, mtx2 : scipy.sparse matrices
        The compared matrices; assumed to share the same nonzero pattern
        (their .data arrays are subtracted elementwise).
    delta : float
        Tolerance; the reporting threshold is ``max(1e-5, 10 * delta)``.
    legend : sequence of two str
        Names of the two matrices, used in printouts and plot titles.
    mode : int
        If < 2, only the text reports are produced; otherwise plots too.
    """
    eps = 1e-16

    print("min", legend[0] , legend[1], ":",
          nm.amin(mtx1.data), nm.amin(mtx2.data))
    print("max", legend[0] , legend[1], ":",
          nm.amax(mtx1.data), nm.amax(mtx2.data))

    mtx_da = mtx1.copy() # To preserve structure of mtx1.
    mtx_da.data[:] = nm.abs(mtx1.data - mtx2.data)

    # Relative difference: -1 marks entries where mtx1 is (near) zero and
    # the ratio is undefined.
    mtx_dr = mtx_da.copy()
    mtx_dr.data[:] = -1
    iin = nm.where(nm.abs(mtx1.data) > eps)[0]
    mtx_dr.data[iin] = mtx_da.data[iin] / nm.abs(mtx1.data[iin])

    print("err abs min max:", nm.amin(mtx_da.data), nm.amax(mtx_da.data))
    print("err rel min max:", nm.amin(mtx_dr.data), nm.amax(mtx_dr.data))

    epsilon = max(1e-5, 10 * delta)
    print('epsilon:', epsilon)
    pause()

    # Entries with large absolute difference.
    ija = nm.where(mtx_da.data > epsilon)[0]
    print_matrix_diff('--- absolute diff', legend,
                      mtx1, mtx2, mtx_da, mtx_dr, ija)
    pause()

    # Entries with large relative difference (restricted to nonzero mtx1).
    iin = nm.where(nm.abs(mtx1.data) > epsilon)[0]
    ij = nm.where(nm.abs(mtx_dr.data[iin]) > epsilon)[0]
    ij = iin[ij]
    print_matrix_diff('--- relative diff', legend,
                      mtx1, mtx2, mtx_da, mtx_dr, ij)
    pause()

    # Entries large in both the absolute and the relative sense.
    ijb = nm.intersect1d(ija, ij)
    print_matrix_diff('--- a-r', legend,
                      mtx1, mtx2, mtx_da, mtx_dr, ijb)
    pause()

    # The (up to) 20 worst offenders by relative difference.
    ii = nm.argsort(mtx_dr.data[ijb])
    n_s = min(20, len(ii))
    ijbs = ijb[ii[-1:-n_s-1:-1]]
    print_matrix_diff('--- a-r 20 biggest (by r)', legend,
                      mtx1, mtx2, mtx_da, mtx_dr, ijbs)
    pause()

    if mode < 2: return

    h = 100
    plt.figure(h); plt.clf()

    plt.axes([0.04, 0.6, 0.3, 0.3], frameon=True)
    spy(mtx_da, epsilon)
    plt.title('absolute diff')

    plt.axes([0.68, 0.6, 0.3, 0.3], frameon=True)
    # Zero out relative differences where mtx1 is below epsilon, so only
    # meaningful ratios are plotted.
    iia = nm.where(mtx_dr.data)[0]
    mtx_dr.data[nm.setdiff1d(iia, iin)] = 0.0
    spy(mtx_dr, epsilon)
    plt.title('relative diff')

    plt.axes([0.36, 0.6, 0.3, 0.3], frameon=True)
    # Indicator matrix of entries large in both senses.
    mtx = mtx_dr.copy()
    mtx.data[:] = 0.0
    ii = nm.intersect1d(nm.where(mtx_dr.data > epsilon)[0],
                        nm.where(mtx_da.data > epsilon)[0])
    mtx.data[ii] = 1.0
    spy(mtx, epsilon)
    plt.title('a-r intersection')

    plt.axes([0.04, 0.08, 0.42, 0.42], frameon=True)
    spy(mtx1, epsilon)
    plt.title(legend[0])
    plt.axes([0.54, 0.08, 0.42, 0.42], frameon=True)
    spy(mtx2, epsilon)
    plt.title(legend[1])

    plt.show()

##
# 02.05.2006, c
def set_axes_font_size(ax, size):
    """Set the font size of all x and y tick labels of axes `ax`."""
    labels = ax.get_xticklabels() + ax.get_yticklabels()
    for label in labels:
        label.set_size(size)

##
# 27.09.2006, c
def font_size(size):
    """Return matplotlib FontProperties with the given font `size`."""
    return mpl.font_manager.FontProperties(size=size)

##
# 28.08.2007, c
def iplot(*args, **kwargs):
    """
    Plot `args`/`kwargs` via ``plt.plot()`` in interactive mode, draw the
    figure, then pause (waits for user input via sfepy's pause()).
    """
    plt.ion()
    plt.plot(*args, **kwargs)
    plt.draw()
    plt.ioff()
    pause()
vlukes/sfepy
sfepy/base/plotutils.py
Python
bsd-3-clause
4,706
--- title: Y combinator --- ## Y combinator This is a stub. <a href='https://github.com/freecodecamp/guides/tree/master/src/pages/certifications/coding-interview-prep/rosetta-code/y-combinator/index.md' target='_blank' rel='nofollow'>Help our community expand it</a>. <a href='https://github.com/freecodecamp/guides/blob/master/README.md' target='_blank' rel='nofollow'>This quick style guide will help ensure your pull request gets accepted</a>. <!-- The article goes here, in GitHub-flavored Markdown. Feel free to add YouTube videos, images, and CodePen/JSBin embeds -->
otavioarc/freeCodeCamp
guide/english/certifications/coding-interview-prep/rosetta-code/y-combinator/index.md
Markdown
bsd-3-clause
578
--- title: Adjust the Width of an Element Using the width Property --- ## Adjust the Width of an Element Using the width Property This is a stub. <a href='https://github.com/freecodecamp/guides/tree/master/src/pages/certifications/responsive-web-design/applied-visual-design/adjust-the-width-of-an-element-using-the-width-property/index.md' target='_blank' rel='nofollow'>Help our community expand it</a>. <a href='https://github.com/freecodecamp/guides/blob/master/README.md' target='_blank' rel='nofollow'>This quick style guide will help ensure your pull request gets accepted</a>. <!-- The article goes here, in GitHub-flavored Markdown. Feel free to add YouTube videos, images, and CodePen/JSBin embeds -->
otavioarc/freeCodeCamp
guide/english/certifications/responsive-web-design/applied-visual-design/adjust-the-width-of-an-element-using-the-width-property/index.md
Markdown
bsd-3-clause
716
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROME_BROWSER_UI_AUTOFILL_AUTOFILL_POPUP_CONTROLLER_IMPL_H_
#define CHROME_BROWSER_UI_AUTOFILL_AUTOFILL_POPUP_CONTROLLER_IMPL_H_

#include "base/gtest_prod_util.h"
#include "base/i18n/rtl.h"
#include "base/memory/weak_ptr.h"
#include "base/strings/string16.h"
#include "chrome/browser/ui/autofill/autofill_popup_controller.h"
#include "chrome/browser/ui/autofill/popup_controller_common.h"
#include "ui/gfx/font_list.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/rect_f.h"

namespace autofill {

class AutofillPopupDelegate;
class AutofillPopupView;

// This class is a controller for an AutofillPopupView. It implements
// AutofillPopupController to allow calls from AutofillPopupView. The
// other, public functions are available to its instantiator.
class AutofillPopupControllerImpl : public AutofillPopupController {
 public:
  // Creates a new |AutofillPopupControllerImpl|, or reuses |previous| if the
  // construction arguments are the same. |previous| may be invalidated by this
  // call. The controller will listen for keyboard input routed to
  // |web_contents| while the popup is showing, unless |web_contents| is NULL.
  static base::WeakPtr<AutofillPopupControllerImpl> GetOrCreate(
      base::WeakPtr<AutofillPopupControllerImpl> previous,
      base::WeakPtr<AutofillPopupDelegate> delegate,
      content::WebContents* web_contents,
      gfx::NativeView container_view,
      const gfx::RectF& element_bounds,
      base::i18n::TextDirection text_direction);

  // Shows the popup, or updates the existing popup with the given values.
  void Show(const std::vector<autofill::Suggestion>& suggestions);

  // Updates the data list values currently shown with the popup.
  void UpdateDataListValues(const std::vector<base::string16>& values,
                            const std::vector<base::string16>& labels);

  // Hides the popup and destroys the controller. This also invalidates
  // |delegate_|.
  void Hide() override;

  // Invoked when the view was destroyed by someone other than this class.
  void ViewDestroyed() override;

  // Handles a key press event routed from |web_contents|; presumably returns
  // true when the popup consumed the event - confirm at call sites.
  bool HandleKeyPressEvent(const content::NativeWebKeyboardEvent& event);

  // Tells the view to capture mouse events. Must be called before |Show()|.
  void set_hide_on_outside_click(bool hide_on_outside_click);

 protected:
  FRIEND_TEST_ALL_PREFIXES(AutofillExternalDelegateBrowserTest,
                           CloseWidgetAndNoLeaking);
  FRIEND_TEST_ALL_PREFIXES(AutofillPopupControllerUnitTest,
                           ProperlyResetController);

  AutofillPopupControllerImpl(base::WeakPtr<AutofillPopupDelegate> delegate,
                              content::WebContents* web_contents,
                              gfx::NativeView container_view,
                              const gfx::RectF& element_bounds,
                              base::i18n::TextDirection text_direction);
  ~AutofillPopupControllerImpl() override;

  // AutofillPopupController implementation.
  void UpdateBoundsAndRedrawPopup() override;
  void SetSelectionAtPoint(const gfx::Point& point) override;
  bool AcceptSelectedLine() override;
  void SelectionCleared() override;
  void AcceptSuggestion(size_t index) override;
  int GetIconResourceID(const base::string16& resource_name) const override;
  bool IsWarning(size_t index) const override;
  gfx::Rect GetRowBounds(size_t index) override;
  void SetPopupBounds(const gfx::Rect& bounds) override;
  const gfx::Rect& popup_bounds() const override;
  gfx::NativeView container_view() override;
  const gfx::RectF& element_bounds() const override;
  bool IsRTL() const override;
  size_t GetLineCount() const override;
  const autofill::Suggestion& GetSuggestionAt(size_t row) const override;
  const base::string16& GetElidedValueAt(size_t row) const override;
  const base::string16& GetElidedLabelAt(size_t row) const override;
#if !defined(OS_ANDROID)
  const gfx::FontList& GetValueFontListForRow(size_t index) const override;
  const gfx::FontList& GetLabelFontList() const override;
#endif
  int selected_line() const override;

  // The WebContents this controller was constructed with (may be NULL, see
  // GetOrCreate()).
  content::WebContents* web_contents();

  // Change which line is currently selected by the user.
  void SetSelectedLine(int selected_line);

  // Increase the selected line by 1, properly handling wrapping.
  void SelectNextLine();

  // Decrease the selected line by 1, properly handling wrapping.
  void SelectPreviousLine();

  // The user has removed a suggestion.
  bool RemoveSelectedLine();

  // Convert a y-coordinate to the closest line.
  int LineFromY(int y);

  // Returns the height of a row depending on its type.
  int GetRowHeightFromId(int identifier) const;

  // Returns true if the given id refers to an element that can be accepted.
  bool CanAccept(int id);

  // Returns true if the popup still has non-options entries to show the user.
  bool HasSuggestions();

  // Set the Autofill entry values. Exposed to allow tests to set these values
  // without showing the popup.
  void SetValues(const std::vector<autofill::Suggestion>& suggestions);

  AutofillPopupView* view() { return view_; }

  // |view_| pass throughs (virtual for testing).
  virtual void ShowView();
  virtual void InvalidateRow(size_t row);

  // Protected so tests can access.
#if !defined(OS_ANDROID)
  // Calculates the desired width of the popup based on its contents.
  int GetDesiredPopupWidth() const;

  // Calculates the desired height of the popup based on its contents.
  int GetDesiredPopupHeight() const;

  // Calculate the width of the row, excluding all the text. This provides
  // the size of the row that won't be reducible (since all the text can be
  // elided if there isn't enough space).
  int RowWidthWithoutText(int row) const;
#endif

  base::WeakPtr<AutofillPopupControllerImpl> GetWeakPtr();

  // Contains common popup functionality such as popup layout. Protected for
  // testing.
  scoped_ptr<PopupControllerCommon> controller_common_;

 private:
  // Clear the internal state of the controller. This is needed to ensure that
  // when the popup is reused it doesn't leak values between uses.
  void ClearState();

#if !defined(OS_ANDROID)
  // Calculates and sets the bounds of the popup, including placing it properly
  // to prevent it from going off the screen.
  void UpdatePopupBounds();
#endif

  AutofillPopupView* view_;  // Weak reference.
  base::WeakPtr<AutofillPopupDelegate> delegate_;

  // The bounds of the Autofill popup.
  gfx::Rect popup_bounds_;

  // The text direction of the popup.
  base::i18n::TextDirection text_direction_;

  // The current Autofill query values.
  std::vector<autofill::Suggestion> suggestions_;

  // Elided values and labels corresponding to the suggestions_ vector to
  // ensure that it fits on the screen.
  std::vector<base::string16> elided_values_;
  std::vector<base::string16> elided_labels_;

#if !defined(OS_ANDROID)
  // The fonts for the popup text.
  gfx::FontList value_font_list_;
  gfx::FontList label_font_list_;
  gfx::FontList warning_font_list_;
  gfx::FontList title_font_list_;
#endif

  // The line that is currently selected by the user.
  // |kNoSelection| indicates that no line is currently selected.
  int selected_line_;

  base::WeakPtrFactory<AutofillPopupControllerImpl> weak_ptr_factory_;
};

}  // namespace autofill

#endif  // CHROME_BROWSER_UI_AUTOFILL_AUTOFILL_POPUP_CONTROLLER_IMPL_H_
ltilve/chromium
chrome/browser/ui/autofill/autofill_popup_controller_impl.h
C
bsd-3-clause
7,547
<!doctype html> <!-- This file is generated by build.py. --> <title>Reference for input tall.jpg; overflow:hidden; -o-object-fit:none; -o-object-position:1em top</title> <link rel="stylesheet" href="../../support/reftests.css"> <style> .helper { overflow:hidden } .helper > * { left:1em; top:0; } </style> <div id="ref"> <span class="helper"><img src="../../support/tall.jpg"></span> </div>
frivoal/presto-testo
css/image-fit/reftests/input-jpg-tall/hidden_none_1em_top-ref.html
HTML
bsd-3-clause
395
<!DOCTYPE html> <meta charset="utf-8"> <title>CSS Test: vertical-rl upright orientation Table Row/Rowgroup/Cell Ordering</title> <link rel="author" title="Elika J. Etemad" href="http://fantasai.inkedblade.net/contact"> <link rel="reviewer" title="Gérard Talbot" href="http://www.gtalbot.org/BrowserBugsSection/css21testsuite/"> <!-- 2016-01-19 --> <link rel="match" href="table-progression-001-ref.html"> <meta name="assert" content="This test checks that vertical-rl tables, whether LTR or RTL, order rows/rowgroups right to left and cells top-to-bottom when text-orientation is upright."> <link rel="help" href="http://www.w3.org/TR/css-writing-modes-3/#block-flow" title="3.1 Block Flow Direction: the 'writing-mode' property"> <link rel="help" href="http://www.w3.org/TR/css-writing-modes-3/#direction" title="2.1 Specifying Directionality: the 'direction' property"> <link rel="help" href="http://www.w3.org/TR/css-writing-modes-3/#text-orientation" title="5.1 Orienting Text: the 'text-orientation' property"> <style> .test { writing-mode: vertical-rl; text-orientation: upright; } [dir=rtl] { direction: rtl; } table { border-spacing: 0; margin: 1em; } td { width: 1em; height: 1em; border: solid gray; } .navy { background: navy} .blue { background: blue } .aqua { background: aqua } .teal { background: teal } .purp { background: purple } .pink { background: fuchsia } .yllw { background: yellow } .orng { background: orange } </style> <p>Test passes if the following three tables look identical. 
<table class="test"> <thead> <tr> <td class="navy"> <td class="blue"> <td colspan=2> <tfoot> <tr> <td colspan=2> <td class="aqua"> <td class="teal"> <tbody> <tr> <td rowspan=3> <td colspan=2> <td class="purp"> <tr> <td class="pink"> <td rowspan=2 colspan=2> <tr> <td class="yllw"> <tbody> <tr> <td class="orng"> <td colspan=3> </table> <table class="test" dir=rtl> <thead> <tr> <td class="navy"> <td class="blue"> <td colspan=2> <tfoot> <tr> <td colspan=2> <td class="aqua"> <td class="teal"> <tbody> <tr> <td rowspan=3> <td colspan=2> <td class="purp"> <tr> <td class="pink"> <td rowspan=2 colspan=2> <tr> <td class="yllw"> <tbody> <tr> <td class="orng"> <td colspan=3> </table> <table class="reference"> <tr> <td rowspan=2> <td class="orng"> <td colspan="3"> <td class="navy"> <tr> <td rowspan="3"> <td class="yllw"> <td class="pink"> <td rowspan="2"> <td class="blue"> <tr> <td class="aqua"> <td rowspan=2 colspan=2> <td rowspan=2> <tr> <td class="teal"> <td class="purp"> </table>
axinging/chromium-crosswalk
third_party/WebKit/LayoutTests/imported/csswg-test/css-writing-modes-3/table-progression-vrl-003.html
HTML
bsd-3-clause
2,861
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <script src="../../../../../tools/svgweb/src/svg.js" data-path="../../../../../tools/svgweb/src"></script> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/> <meta name="keywords" content="W3C SVG 1.1 2nd Edition Test Suite"/> <meta name="description" content="W3C SVG 1.1 2nd Edition SVGWeb Test Suite"/> <title> SVG 1.1 2nd Edition Test (svgweb): masking-path-03-b.svg </title> <style type="text/css"> <!-- .bodytext { font-family:verdana, helvetica, sans-serif; font-size: 12pt; line-height: 125%; text-align: Left; margin-top: 0; margin-bottom: 0 } .pageTitle { line-height: 150%; font-size: 20pt; font-weight : 900; margin-bottom: 20pt } .pageSubTitle { color : blue; line-height: 100%; font-size: 24pt; font-weight : 900 } .openChapter { color : blue; line-height: 125%; font-weight : 900 } .openSection { color : blue; line-height: 125%; font-weight : 900 } .info { color : black; line-height: 110%; font-size: 10pt; font-weight : 100 } p { margin-top:0; margin-bottom:0; padding-top:0; padding-bottom:0 } blockquote { margin-top:0; margin-bottom:0; padding-top:0; padding-bottom:0 } .opscript { margin-left: 3%; margin-right: 3%; } .opscript p { margin-top: 0.7em } .navbar { background: black; color: white; font-weight: bold } .warning { color: red; text-align: Center;} a,a:visited { color: blue } --> </style> <link rel="prev" href="masking-path-02-b.html" /> <link rel="index" href="index.html" /> <link rel="next" href="masking-path-04-b.html" /> <script src="../resources/testharnessreport.js"></script> </head> <body class="bodytext"> <div class="linkbar"> <p>Specification link: <a target="spec" href="http://www.w3.org/TR/SVG11/masking.html#ClippingPaths">14.3 Clipping paths</a></p> <p> <a href="masking-path-02-b.html" rel="prev">masking-path-02-b 
←</a> <a href="index.html" rel="index">index</a> <a href="masking-path-04-b.html" rel="next">→ masking-path-04-b</a> </p> </div> <div> <br /> <p class="warning"> Tests that contain the draft-watermark are under development and may be incorrectly testing a feature. </p> </div> <table align="center" border="0" cellspacing="0" cellpadding="10"> <tr> <td align="center" colspan="3"> <table border="0" cellpadding="8"> <tr> <td align="center" colspan="2" class="pageTitle"> <h1>masking-path-03-b.svg</h1> </td> </tr> <tr class="navbar"> <td align="center"> SVG Image </td> <td align="center"> PNG Image </td> </tr> <tr> <td align="right"> <!--[if IE]> <object src="../../svg/masking-path-03-b.svg" width="480" height="360" classid="image/svg+xml"><p style="font-size:300%;color:red">FAIL</p> <![endif]--> <!--[if !IE]>--> <object data="../../svg/masking-path-03-b.svg" width="480" height="360" type="image/svg+xml"><p style="font-size:300%;color:red">FAIL</p> <!--<![endif]--> </object> </td> <td align="left"> <img alt="raster image of masking-path-03-b.svg" src="../../png/masking-path-03-b.png" width="480" height="360"/> </td> </tr> </table> </td> </tr> </table> <div class="opscript"> <h2 id="operatorscript"> Operator Script </h2> <div> <p> Run the test. No interaction required. </p> </div> <h2 id="passcriteria"> Pass Criteria </h2> <div> <p>The test passes if:</p><ul> <li>The four "Outer Clip" boxed strings must not render outside the outermost 'svg' element (the 480x360 rectangular viewport) and must continue to be clipped to this viewport if the image is zoomed in or out, or panned.</li> <li>The four "Inner Clip" boxed strings must not render outside the bounds of the green rectangle.</li> </ul> </div> <h2 id="testdescription"> Test Description </h2> <div> <p> Test 'overflow'/'clip' on outermost and inner 'svg' elements. </p><p> There are two parts to the test. The first part tests viewport clipping on outermost 'svg' elements. 
The second part tests viewport clipping on inner 'svg' elements. </p><p> The test case also tests the initial value of the 'overflow' property to ensure that it is set to 'hidden' for all 'svg' elements. Tester should zoom out and/or pan to check this. </p><p> To test clipping to the outermost 'svg' element, a rectangle with a light blue interior, a light red border and a black string that says "Clip to outer 'svg'" is painted four times such that it will overflow each of the top, left, right and bottom sides of the bounds of the outermost 'svg' element, respectively. </p><p> To test clipping to inner 'svg' elements, a rectangle with a light red interior, a light blue border and a black string that says "Clip to inner 'svg'" is painted four times such that it will overflow each of the top, left, right and bottom sides of the bounds of an inner 'svg' element, respectively. </p><p> Note that minor text layout differences, as are permissible under CSS2 rules, can lead to slightly different visual results regarding where the text strings get clipped. </p> </div> </div> <br/> <div class="linkbar"> <p> <a href="masking-path-02-b.html" rel="prev">masking-path-02-b ←</a> <a href="index.html" rel="index">index</a> <a href="masking-path-04-b.html" rel="next">→ masking-path-04-b</a> </p> </div> </body> </html>
frivoal/presto-testo
SVG/Testsuites/W3C-1_1F2/harness/htmlSVGWeb/masking-path-03-b.html
HTML
bsd-3-clause
6,227
""" =========================================== Sparse coding with a precomputed dictionary =========================================== Transform a signal as a sparse combination of Ricker wavelets. This example visually compares different sparse coding methods using the :class:`sklearn.decomposition.SparseCoder` estimator. The Ricker (also known as mexican hat or the second derivative of a gaussian) is not a particularily good kernel to represent piecewise constant signals like this one. It can therefore be seen how much adding different widths of atoms matters and it therefore motivates learning the dictionary to best fit your type of signals. The richer dictionary on the right is not larger in size, heavier subsampling is performed in order to stay on the same order of magnitude. """ print __doc__ import numpy as np import matplotlib.pylab as pl from sklearn.decomposition import SparseCoder def ricker_function(resolution, center, width): """Discrete sub-sampled Ricker (mexican hat) wavelet""" x = np.linspace(0, resolution - 1, resolution) x = (2 / ((np.sqrt(3 * width) * np.pi ** 1 / 4))) * ( 1 - ((x - center) ** 2 / width ** 2)) * np.exp( (-(x - center) ** 2) / (2 * width ** 2)) return x def ricker_matrix(width, resolution, n_atoms): """Dictionary of Ricker (mexican hat) wavelets""" centers = np.linspace(0, resolution - 1, n_atoms) D = np.empty((n_atoms, resolution)) for i, center in enumerate(centers): D[i] = ricker_function(resolution, center, width) D /= np.sqrt(np.sum(D ** 2, axis=1))[:, np.newaxis] return D resolution = 1024 subsampling = 3 # subsampling factor width = 100 n_atoms = resolution / subsampling # Compute a wavelet dictionary D_fixed = ricker_matrix(width=width, resolution=resolution, n_atoms=n_atoms) D_multi = np.r_[tuple(ricker_matrix(width=w, resolution=resolution, n_atoms=np.floor(n_atoms / 5)) for w in (10, 50, 100, 500, 1000))] # Generate a signal y = np.linspace(0, resolution - 1, resolution) first_quarter = y < resolution / 4 
# Piecewise-constant test signal: 3 on the first quarter, -1 elsewhere.
y[first_quarter] = 3.
y[np.logical_not(first_quarter)] = -1.

# List the different sparse coding methods in the following format:
# (title, transform_algorithm, transform_alpha, transform_n_nonzero_coefs)
estimators = [('OMP', 'omp', None, 15), ('Lasso', 'lasso_cd', 2, None), ]

pl.figure(figsize=(13, 6))
# One subplot per dictionary: fixed-width atoms vs. mixed-width atoms.
for subplot, (D, title) in enumerate(zip((D_fixed, D_multi),
                                         ('fixed width', 'multiple widths'))):
    pl.subplot(1, 2, subplot + 1)
    pl.title('Sparse coding against %s dictionary' % title)
    pl.plot(y, ls='dotted', label='Original signal')
    # Do a wavelet approximation
    # NOTE(review): the loop variable shadows the outer `title`; harmless here
    # because pl.title() has already consumed it before the loop starts.
    for title, algo, alpha, n_nonzero in estimators:
        coder = SparseCoder(dictionary=D, transform_n_nonzero_coefs=n_nonzero,
                            transform_alpha=alpha, transform_algorithm=algo)
        # Sparse codes for y, then reconstruct the signal as codes . D.
        x = coder.transform(y)
        density = len(np.flatnonzero(x))
        x = np.ravel(np.dot(x, D))
        squared_error = np.sum((y - x) ** 2)
        pl.plot(x, label='%s: %s nonzero coefs,\n%.2f error'
                % (title, density, squared_error))

    # Soft thresholding debiasing: thresholding shrinks the surviving
    # coefficients, so refit them by least squares on the selected atoms only.
    coder = SparseCoder(dictionary=D, transform_algorithm='threshold',
                        transform_alpha=20)
    x = coder.transform(y)
    # x has shape (1, n_atoms); keep the column indices of nonzero codes.
    _, idx = np.where(x != 0)
    x[0, idx], _, _, _ = np.linalg.lstsq(D[idx, :].T, y)
    x = np.ravel(np.dot(x, D))
    squared_error = np.sum((y - x) ** 2)
    pl.plot(x, label='Thresholding w/ debiasing:\n%d nonzero coefs, %.2f error'
            % (len(idx), squared_error))
pl.axis('tight')
pl.legend()
pl.subplots_adjust(.04, .07, .97, .90, .09, .2)
pl.show()
cdegroc/scikit-learn
examples/decomposition/plot_sparse_coding.py
Python
bsd-3-clause
3,808
<!doctype html>
<title>getComputedStyle, embed</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<embed>
<!-- Checks that every valid value of the vendor-prefixed object-fit property
     round-trips through the computed style of an <embed> element. -->
<script>
test(function() {
  // The <embed> above is body's first child.
  var e = document.body.firstChild;
  // `OObjectFit` is the DOM camel-case name for '-o-object-fit'
  // (Opera/Presto prefix) — presumably; confirm against the CSSOM mapping.
  var vals = ['none', 'fill', 'cover', 'contain', 'auto'];
  var expected = ['none', 'fill', 'cover', 'contain', 'auto'];
  for (var i = 0; i < vals.length; ++i) {
    // Each keyword should compute to itself (no normalization expected).
    e.style.OObjectFit = vals[i];
    assert_equals(getComputedStyle(e, '').OObjectFit, expected[i], vals[i]);
  }
});
</script>
frivoal/presto-testo
css/image-fit/js/getComputedStyle/016.html
HTML
bsd-3-clause
525
/* * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.imagepipeline.cache; import android.os.SystemClock; import com.facebook.common.internal.Supplier; import com.facebook.common.memory.MemoryTrimType; import com.facebook.common.references.CloseableReference; import com.facebook.common.references.ResourceReleaser; import com.android.internal.util.Predicate; import org.junit.*; import org.junit.runner.*; import org.mockito.*; import org.mockito.Mock; import org.powermock.api.mockito.*; import org.powermock.core.classloader.annotations.*; import org.powermock.modules.junit4.rule.*; import org.robolectric.*; import org.robolectric.annotation.*; import static org.junit.Assert.*; import static org.mockito.Matchers.anyInt; import static org.mockito.Mockito.*; @RunWith(RobolectricTestRunner.class) @PrepareForTest({SystemClock.class}) @PowerMockIgnore({ "org.mockito.*", "org.robolectric.*", "android.*" }) @Config(manifest=Config.NONE) public class CountingMemoryCacheTest { private static final int CACHE_MAX_SIZE = 1200; private static final int CACHE_MAX_COUNT = 4; private static final int CACHE_EVICTION_QUEUE_MAX_SIZE = 1100; private static final int CACHE_EVICTION_QUEUE_MAX_COUNT = 3; private static final int CACHE_ENTRY_MAX_SIZE = 1000; @Mock public ResourceReleaser<Integer> mReleaser; @Mock public CountingMemoryCache.CacheTrimStrategy mCacheTrimStrategy; @Mock public Supplier<MemoryCacheParams> mParamsSupplier; @Mock public CountingMemoryCache.EntryStateObserver<String> mEntryStateObserver; @Rule public PowerMockRule rule = new PowerMockRule(); private ValueDescriptor<Integer> mValueDescriptor; private MemoryCacheParams mParams; private CountingMemoryCache<String, Integer> mCache; private static final 
String KEY = "KEY"; private static final String[] KEYS = new String[] {"k0", "k1", "k2", "k3", "k4", "k5", "k6", "k7", "k8", "k9"}; @Before public void setUp() { MockitoAnnotations.initMocks(this); PowerMockito.mockStatic(SystemClock.class); PowerMockito.when(SystemClock.uptimeMillis()).thenReturn(0L); mValueDescriptor = new ValueDescriptor<Integer>() { @Override public int getSizeInBytes(Integer value) { return value; } }; mParams = new MemoryCacheParams( CACHE_MAX_SIZE, CACHE_MAX_COUNT, CACHE_EVICTION_QUEUE_MAX_SIZE, CACHE_EVICTION_QUEUE_MAX_COUNT, CACHE_ENTRY_MAX_SIZE); when(mParamsSupplier.get()).thenReturn(mParams); mCache = new CountingMemoryCache<>(mValueDescriptor, mCacheTrimStrategy, mParamsSupplier); } @Test public void testCache() { mCache.cache(KEY, newReference(100)); assertTotalSize(1, 100); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 1); verify(mReleaser, never()).release(anyInt()); } @Test public void testClosingOriginalReference() { CloseableReference<Integer> originalRef = newReference(100); mCache.cache(KEY, originalRef); // cache should make its own copy and closing the original reference after caching // should not affect the cached value originalRef.close(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 1); verify(mReleaser, never()).release(anyInt()); } @Test public void testClosingClientReference() { CloseableReference<Integer> cachedRef = mCache.cache(KEY, newReference(100)); // cached item should get exclusively owned cachedRef.close(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(1, 100); assertExclusivelyOwned(KEY, 100); verify(mReleaser, never()).release(anyInt()); } @Test public void testNotExclusiveAtFirst() { mCache.cache(KEY, newReference(100), mEntryStateObserver); verify(mEntryStateObserver, never()).onExclusivityChanged(anyString(), anyBoolean()); } @Test public void testToggleExclusive() { CloseableReference<Integer> cachedRef = mCache.cache(KEY, 
newReference(100), mEntryStateObserver); cachedRef.close(); verify(mEntryStateObserver).onExclusivityChanged(KEY, true); mCache.get(KEY); verify(mEntryStateObserver).onExclusivityChanged(KEY, false); } @Test public void testCantReuseNonExclusive() { mCache.cache(KEY, newReference(100), mEntryStateObserver); assertNull(mCache.reuse(KEY)); verify(mEntryStateObserver, never()).onExclusivityChanged(anyString(), anyBoolean()); } @Test public void testCanReuseExclusive() { CloseableReference<Integer> cachedRef = mCache.cache(KEY, newReference(100), mEntryStateObserver); cachedRef.close(); verify(mEntryStateObserver).onExclusivityChanged(KEY, true); cachedRef = mCache.reuse(KEY); assertNotNull(cachedRef); verify(mEntryStateObserver).onExclusivityChanged(KEY, false); cachedRef.close(); verify(mEntryStateObserver).onExclusivityChanged(KEY, true); } @Test public void testInUseCount() { CloseableReference<Integer> cachedRef1 = mCache.cache(KEY, newReference(100)); CloseableReference<Integer> cachedRef2a = mCache.get(KEY); CloseableReference<Integer> cachedRef2b = cachedRef2a.clone(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 2); CloseableReference<Integer> cachedRef3a = mCache.get(KEY); CloseableReference<Integer> cachedRef3b = cachedRef3a.clone(); CloseableReference<Integer> cachedRef3c = cachedRef3b.clone(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 3); cachedRef1.close(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 2); // all copies of cachedRef2a need to be closed for usage count to drop cachedRef2a.close(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 2); cachedRef2b.close(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 1); // all copies of cachedRef3a need to be closed for usage count to drop cachedRef3c.close(); assertTotalSize(1, 100); 
assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 1); cachedRef3b.close(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEY, 100, 1); cachedRef3a.close(); assertTotalSize(1, 100); assertExclusivelyOwnedSize(1, 100); assertExclusivelyOwned(KEY, 100); } @Test public void testCachingSameKeyTwice() { CloseableReference<Integer> originalRef1 = newReference(110); CloseableReference<Integer> cachedRef1 = mCache.cache(KEY, originalRef1); CloseableReference<Integer> cachedRef2a = mCache.get(KEY); CloseableReference<Integer> cachedRef2b = cachedRef2a.clone(); CloseableReference<Integer> cachedRef3 = mCache.get(KEY); CountingMemoryCache.Entry<String, Integer> entry1 = mCache.mCachedEntries.get(KEY); CloseableReference<Integer> cachedRef2 = mCache.cache(KEY, newReference(120)); CountingMemoryCache.Entry<String, Integer> entry2 = mCache.mCachedEntries.get(KEY); assertNotSame(entry1, entry2); assertOrphanWithCount(entry1, 3); assertSharedWithCount(KEY, 120, 1); // release the orphaned reference only when all clients are gone originalRef1.close(); cachedRef2b.close(); assertOrphanWithCount(entry1, 3); cachedRef2a.close(); assertOrphanWithCount(entry1, 2); cachedRef1.close(); assertOrphanWithCount(entry1, 1); verify(mReleaser, never()).release(anyInt()); cachedRef3.close(); assertOrphanWithCount(entry1, 0); verify(mReleaser).release(110); } @Test public void testDoesNotCacheBigValues() { assertNull(mCache.cache(KEY, newReference(CACHE_ENTRY_MAX_SIZE + 1))); } @Test public void testDoesCacheNotTooBigValues() { assertNotNull(mCache.cache(KEY, newReference(CACHE_ENTRY_MAX_SIZE))); } @Test public void testEviction_ByTotalSize() { // value 4 cannot fit the cache CloseableReference<Integer> originalRef1 = newReference(400); CloseableReference<Integer> valueRef1 = mCache.cache(KEYS[1], originalRef1); originalRef1.close(); CloseableReference<Integer> originalRef2 = newReference(500); CloseableReference<Integer> valueRef2 = 
mCache.cache(KEYS[2], originalRef2); originalRef2.close(); CloseableReference<Integer> originalRef3 = newReference(100); CloseableReference<Integer> valueRef3 = mCache.cache(KEYS[3], originalRef3); originalRef3.close(); CloseableReference<Integer> originalRef4 = newReference(700); CloseableReference<Integer> valueRef4 = mCache.cache(KEYS[4], originalRef4); originalRef4.close(); assertTotalSize(3, 1000); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEYS[1], 400, 1); assertSharedWithCount(KEYS[2], 500, 1); assertSharedWithCount(KEYS[3], 100, 1); assertNotCached(KEYS[4], 700); assertNull(valueRef4); // closing the clients of cached items will make them viable for eviction valueRef1.close(); valueRef2.close(); valueRef3.close(); assertTotalSize(3, 1000); assertExclusivelyOwnedSize(3, 1000); // value 4 can now fit after evicting value1 and value2 valueRef4 = mCache.cache(KEYS[4], newReference(700)); assertTotalSize(2, 800); assertExclusivelyOwnedSize(1, 100); assertNotCached(KEYS[1], 400); assertNotCached(KEYS[2], 500); assertExclusivelyOwned(KEYS[3], 100); assertSharedWithCount(KEYS[4], 700, 1); verify(mReleaser).release(400); verify(mReleaser).release(500); } @Test public void testEviction_ByTotalCount() { // value 5 cannot fit the cache CloseableReference<Integer> originalRef1 = newReference(110); CloseableReference<Integer> valueRef1 = mCache.cache(KEYS[1], originalRef1); originalRef1.close(); CloseableReference<Integer> originalRef2 = newReference(120); CloseableReference<Integer> valueRef2 = mCache.cache(KEYS[2], originalRef2); originalRef2.close(); CloseableReference<Integer> originalRef3 = newReference(130); CloseableReference<Integer> valueRef3 = mCache.cache(KEYS[3], originalRef3); originalRef3.close(); CloseableReference<Integer> originalRef4 = newReference(140); CloseableReference<Integer> valueRef4 = mCache.cache(KEYS[4], originalRef4); originalRef4.close(); CloseableReference<Integer> originalRef5 = newReference(150); CloseableReference<Integer> 
valueRef5 = mCache.cache(KEYS[5], originalRef5); originalRef5.close(); assertTotalSize(4, 500); assertExclusivelyOwnedSize(0, 0); assertSharedWithCount(KEYS[1], 110, 1); assertSharedWithCount(KEYS[2], 120, 1); assertSharedWithCount(KEYS[3], 130, 1); assertSharedWithCount(KEYS[4], 140, 1); assertNotCached(KEYS[5], 150); assertNull(valueRef5); // closing the clients of cached items will make them viable for eviction valueRef1.close(); valueRef2.close(); valueRef3.close(); assertTotalSize(4, 500); assertExclusivelyOwnedSize(3, 360); // value 4 can now fit after evicting value1 valueRef4 = mCache.cache(KEYS[5], newReference(150)); assertTotalSize(4, 540); assertExclusivelyOwnedSize(2, 250); assertNotCached(KEYS[1], 110); assertExclusivelyOwned(KEYS[2], 120); assertExclusivelyOwned(KEYS[3], 130); assertSharedWithCount(KEYS[4], 140, 1); assertSharedWithCount(KEYS[5], 150, 1); verify(mReleaser).release(110); } @Test public void testEviction_ByEvictionQueueSize() { CloseableReference<Integer> originalRef1 = newReference(200); CloseableReference<Integer> valueRef1 = mCache.cache(KEYS[1], originalRef1); originalRef1.close(); valueRef1.close(); CloseableReference<Integer> originalRef2 = newReference(300); CloseableReference<Integer> valueRef2 = mCache.cache(KEYS[2], originalRef2); originalRef2.close(); valueRef2.close(); CloseableReference<Integer> originalRef3 = newReference(700); CloseableReference<Integer> valueRef3 = mCache.cache(KEYS[3], originalRef3); originalRef3.close(); assertTotalSize(3, 1200); assertExclusivelyOwnedSize(2, 500); assertExclusivelyOwned(KEYS[1], 200); assertExclusivelyOwned(KEYS[2], 300); assertSharedWithCount(KEYS[3], 700, 1); verify(mReleaser, never()).release(anyInt()); // closing the client reference for item3 will cause item1 to be evicted valueRef3.close(); assertTotalSize(2, 1000); assertExclusivelyOwnedSize(2, 1000); assertNotCached(KEYS[1], 200); assertExclusivelyOwned(KEYS[2], 300); assertExclusivelyOwned(KEYS[3], 700); 
verify(mReleaser).release(200); } @Test public void testEviction_ByEvictionQueueCount() { CloseableReference<Integer> originalRef1 = newReference(110); CloseableReference<Integer> valueRef1 = mCache.cache(KEYS[1], originalRef1); originalRef1.close(); valueRef1.close(); CloseableReference<Integer> originalRef2 = newReference(120); CloseableReference<Integer> valueRef2 = mCache.cache(KEYS[2], originalRef2); originalRef2.close(); valueRef2.close(); CloseableReference<Integer> originalRef3 = newReference(130); CloseableReference<Integer> valueRef3 = mCache.cache(KEYS[3], originalRef3); originalRef3.close(); valueRef3.close(); CloseableReference<Integer> originalRef4 = newReference(140); CloseableReference<Integer> valueRef4 = mCache.cache(KEYS[4], originalRef4); originalRef4.close(); assertTotalSize(4, 500); assertExclusivelyOwnedSize(3, 360); assertExclusivelyOwned(KEYS[1], 110); assertExclusivelyOwned(KEYS[2], 120); assertExclusivelyOwned(KEYS[3], 130); assertSharedWithCount(KEYS[4], 140, 1); verify(mReleaser, never()).release(anyInt()); // closing the client reference for item4 will cause item1 to be evicted valueRef4.close(); assertTotalSize(3, 390); assertExclusivelyOwnedSize(3, 390); assertNotCached(KEYS[1], 110); assertExclusivelyOwned(KEYS[2], 120); assertExclusivelyOwned(KEYS[3], 130); assertExclusivelyOwned(KEYS[4], 140); verify(mReleaser).release(110); } @Test public void testUpdatesCacheParams() { InOrder inOrder = inOrder(mParamsSupplier); CloseableReference<Integer> originalRef = newReference(700); CloseableReference<Integer> cachedRef = mCache.cache(KEYS[2], originalRef); originalRef.close(); cachedRef.close(); mCache.get(KEY); inOrder.verify(mParamsSupplier).get(); PowerMockito.when(SystemClock.uptimeMillis()) .thenReturn(CountingMemoryCache.PARAMS_INTERCHECK_INTERVAL_MS - 1); mCache.get(KEY); inOrder.verify(mParamsSupplier, never()).get(); mCache.get(KEY); inOrder.verify(mParamsSupplier, never()).get(); assertTotalSize(1, 700); 
assertExclusivelyOwnedSize(1, 700); mParams = new MemoryCacheParams( 500 /* cache max size */, CACHE_MAX_COUNT, CACHE_EVICTION_QUEUE_MAX_SIZE, CACHE_EVICTION_QUEUE_MAX_COUNT, CACHE_ENTRY_MAX_SIZE); when(mParamsSupplier.get()).thenReturn(mParams); PowerMockito.when(SystemClock.uptimeMillis()) .thenReturn(CountingMemoryCache.PARAMS_INTERCHECK_INTERVAL_MS); mCache.get(KEY); inOrder.verify(mParamsSupplier).get(); assertTotalSize(0, 0); assertExclusivelyOwnedSize(0, 0); verify(mReleaser).release(700); } @Test public void testRemoveAllMatchingPredicate() { CloseableReference<Integer> originalRef1 = newReference(110); CloseableReference<Integer> valueRef1 = mCache.cache(KEYS[1], originalRef1); originalRef1.close(); valueRef1.close(); CloseableReference<Integer> originalRef2 = newReference(120); CloseableReference<Integer> valueRef2 = mCache.cache(KEYS[2], originalRef2); originalRef2.close(); valueRef2.close(); CloseableReference<Integer> originalRef3 = newReference(130); CloseableReference<Integer> valueRef3 = mCache.cache(KEYS[3], originalRef3); originalRef3.close(); CountingMemoryCache.Entry<String, Integer> entry3 = mCache.mCachedEntries.get(KEYS[3]); CloseableReference<Integer> originalRef4 = newReference(150); CloseableReference<Integer> valueRef4 = mCache.cache(KEYS[4], originalRef4); originalRef4.close(); int numEvictedEntries = mCache.removeAll( new Predicate<String>() { @Override public boolean apply(String key) { return key.equals(KEYS[2]) || key.equals(KEYS[3]); } }); assertEquals(2, numEvictedEntries); assertTotalSize(2, 260); assertExclusivelyOwnedSize(1, 110); assertExclusivelyOwned(KEYS[1], 110); assertNotCached(KEYS[2], 120); assertOrphanWithCount(entry3, 1); assertSharedWithCount(KEYS[4], 150, 1); verify(mReleaser).release(120); verify(mReleaser, never()).release(130); valueRef3.close(); verify(mReleaser).release(130); } @Test public void testClear() { CloseableReference<Integer> originalRef1 = newReference(110); CloseableReference<Integer> cachedRef1 = 
mCache.cache(KEYS[1], originalRef1); originalRef1.close(); CountingMemoryCache.Entry<String, Integer> entry1 = mCache.mCachedEntries.get(KEYS[1]); CloseableReference<Integer> originalRef2 = newReference(120); CloseableReference<Integer> cachedRef2 = mCache.cache(KEYS[2], originalRef2); originalRef2.close(); cachedRef2.close(); mCache.clear(); assertTotalSize(0, 0); assertExclusivelyOwnedSize(0, 0); assertOrphanWithCount(entry1, 1); assertNotCached(KEYS[2], 120); verify(mReleaser).release(120); cachedRef1.close(); verify(mReleaser).release(110); } @Test public void testTrimming() { MemoryTrimType memoryTrimType = MemoryTrimType.OnCloseToDalvikHeapLimit; mParams = new MemoryCacheParams(1100, 10, 1100, 10, 110); when(mParamsSupplier.get()).thenReturn(mParams); PowerMockito.when(SystemClock.uptimeMillis()) .thenReturn(CountingMemoryCache.PARAMS_INTERCHECK_INTERVAL_MS); InOrder inOrder = inOrder(mReleaser); // create original references CloseableReference<Integer>[] originalRefs = new CloseableReference[10]; for (int i = 0; i < 10; i++) { originalRefs[i] = newReference(100 + i); } // cache items & close the original references CloseableReference<Integer>[] cachedRefs = new CloseableReference[10]; for (int i = 0; i < 10; i++) { cachedRefs[i] = mCache.cache(KEYS[i], originalRefs[i]); originalRefs[i].close(); } // cache should keep alive the items until evicted inOrder.verify(mReleaser, never()).release(anyInt()); // trimming cannot evict shared entries when(mCacheTrimStrategy.getTrimRatio(memoryTrimType)).thenReturn(1.00); mCache.trim(memoryTrimType); assertSharedWithCount(KEYS[0], 100, 1); assertSharedWithCount(KEYS[1], 101, 1); assertSharedWithCount(KEYS[2], 102, 1); assertSharedWithCount(KEYS[3], 103, 1); assertSharedWithCount(KEYS[4], 104, 1); assertSharedWithCount(KEYS[5], 105, 1); assertSharedWithCount(KEYS[6], 106, 1); assertSharedWithCount(KEYS[7], 107, 1); assertSharedWithCount(KEYS[8], 108, 1); assertSharedWithCount(KEYS[9], 109, 1); assertTotalSize(10, 1045); 
assertExclusivelyOwnedSize(0, 0); // close 7 client references cachedRefs[8].close(); cachedRefs[2].close(); cachedRefs[7].close(); cachedRefs[3].close(); cachedRefs[6].close(); cachedRefs[4].close(); cachedRefs[5].close(); assertSharedWithCount(KEYS[0], 100, 1); assertSharedWithCount(KEYS[1], 101, 1); assertSharedWithCount(KEYS[9], 109, 1); assertExclusivelyOwned(KEYS[8], 108); assertExclusivelyOwned(KEYS[2], 102); assertExclusivelyOwned(KEYS[7], 107); assertExclusivelyOwned(KEYS[3], 103); assertExclusivelyOwned(KEYS[6], 106); assertExclusivelyOwned(KEYS[4], 104); assertExclusivelyOwned(KEYS[5], 105); assertTotalSize(10, 1045); assertExclusivelyOwnedSize(7, 735); // Trim cache by 45%. This means that out of total of 1045 bytes cached, 574 should remain. // 310 bytes is used by the clients, which leaves 264 for the exclusively owned items. // Only the two most recent exclusively owned items fit, and they occupy 209 bytes. when(mCacheTrimStrategy.getTrimRatio(memoryTrimType)).thenReturn(0.45); mCache.trim(memoryTrimType); assertSharedWithCount(KEYS[0], 100, 1); assertSharedWithCount(KEYS[1], 101, 1); assertSharedWithCount(KEYS[9], 109, 1); assertExclusivelyOwned(KEYS[4], 104); assertExclusivelyOwned(KEYS[5], 105); assertNotCached(KEYS[8], 108); assertNotCached(KEYS[2], 102); assertNotCached(KEYS[7], 107); assertNotCached(KEYS[3], 103); assertNotCached(KEYS[6], 106); assertTotalSize(5, 519); assertExclusivelyOwnedSize(2, 209); inOrder.verify(mReleaser).release(108); inOrder.verify(mReleaser).release(102); inOrder.verify(mReleaser).release(107); inOrder.verify(mReleaser).release(103); inOrder.verify(mReleaser).release(106); // Full trim. All exclusively owned items should be evicted. 
when(mCacheTrimStrategy.getTrimRatio(memoryTrimType)).thenReturn(1.00); mCache.trim(memoryTrimType); assertSharedWithCount(KEYS[0], 100, 1); assertSharedWithCount(KEYS[1], 101, 1); assertSharedWithCount(KEYS[9], 109, 1); assertNotCached(KEYS[8], 108); assertNotCached(KEYS[2], 102); assertNotCached(KEYS[7], 107); assertNotCached(KEYS[3], 103); assertNotCached(KEYS[6], 106); assertNotCached(KEYS[6], 104); assertNotCached(KEYS[6], 105); assertTotalSize(3, 310); assertExclusivelyOwnedSize(0, 0); inOrder.verify(mReleaser).release(104); inOrder.verify(mReleaser).release(105); } private CloseableReference<Integer> newReference(int size) { return CloseableReference.of(size, mReleaser); } private void assertSharedWithCount(String key, Integer value, int count) { assertTrue("key not found in the cache", mCache.mCachedEntries.contains(key)); assertFalse("key found in the exclusives", mCache.mExclusiveEntries.contains(key)); CountingMemoryCache.Entry<String, Integer> entry = mCache.mCachedEntries.get(key); assertNotNull("entry not found in the cache", entry); assertEquals("key mismatch", key, entry.key); assertEquals("value mismatch", value, entry.valueRef.get()); assertEquals("client count mismatch", count, entry.clientCount); assertFalse("entry is an orphan", entry.isOrphan); } private void assertExclusivelyOwned(String key, Integer value) { assertTrue("key not found in the cache", mCache.mCachedEntries.contains(key)); assertTrue("key not found in the exclusives", mCache.mExclusiveEntries.contains(key)); CountingMemoryCache.Entry<String, Integer> entry = mCache.mCachedEntries.get(key); assertNotNull("entry not found in the cache", entry); assertEquals("key mismatch", key, entry.key); assertEquals("value mismatch", value, entry.valueRef.get()); assertEquals("client count greater than zero", 0, entry.clientCount); assertFalse("entry is an orphan", entry.isOrphan); } private void assertNotCached(String key, Integer value) { assertFalse("key found in the cache", 
mCache.mCachedEntries.contains(key)); assertFalse("key found in the exclusives", mCache.mExclusiveEntries.contains(key)); } private void assertOrphanWithCount(CountingMemoryCache.Entry<String, Integer> entry, int count) { assertNotSame("entry found in the exclusives", entry, mCache.mCachedEntries.get(entry.key)); assertNotSame("entry found in the cache", entry, mCache.mExclusiveEntries.get(entry.key)); assertTrue("entry is not an orphan", entry.isOrphan); assertEquals("client count mismatch", count, entry.clientCount); } private void assertTotalSize(int count, int bytes) { assertEquals("total cache count mismatch", count, mCache.getCount()); assertEquals("total cache size mismatch", bytes, mCache.getSizeInBytes()); } private void assertExclusivelyOwnedSize(int count, int bytes) { assertEquals("total exclusives count mismatch", count, mCache.getEvictionQueueCount()); assertEquals("total exclusives size mismatch", bytes, mCache.getEvictionQueueSizeInBytes()); } }
HKMOpen/fresco
imagepipeline-base/src/test/java/com/facebook/imagepipeline/cache/CountingMemoryCacheTest.java
Java
bsd-3-clause
24,794
# -*- coding: utf-8 -*-
"""
    CSS Testing

    :copyright: (C) 2014 by Openlabs Technologies & Consulting (P) Limited
    :license: BSD, see LICENSE for more details.
"""
from os.path import join
from cssutils import CSSParser
import unittest

import trytond.tests.test_tryton

# Directory holding the stylesheets under test.
# (Name kept for backward compatibility even though it shadows builtins.dir.)
dir = 'static/css/'


class CSSTest(unittest.TestCase):
    """Validates the project's stylesheets against the W3C CSS grammar."""

    def validate(self, filename):
        """Parse ``filename`` with cssutils; any CSS error raises.

        Validation output is emitted through cssutils' logger.
        """
        parser = CSSParser(raiseExceptions=True)
        parser.parseFile(filename, validate=True)

    def test_css(self):
        """The main stylesheet must pass W3C CSS validation."""
        self.validate(join(dir, 'style.css'))


def suite():
    """Build the suite expected by the tryton test runner."""
    test_suite = trytond.tests.test_tryton.suite()
    loader = unittest.TestLoader()
    test_suite.addTests(loader.loadTestsFromTestCase(CSSTest))
    return test_suite


if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(suite())
gautampanday/nereid-webshop
tests/test_css.py
Python
bsd-3-clause
1,062
/*-- Copyright (C) 2003-2007 Wolf Paulus. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions, and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions, and the disclaimer that follows these conditions in the documentation and/or other materials provided with the distribution. 3. The end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This product includes software developed by the SWIXML Project (http://www.swixml.org/)." Alternately, this acknowledgment may appear in the software itself, if and wherever such third-party acknowledgments normally appear. 4. The name "Swixml" must not be used to endorse or promote products derived from this software without prior written permission. For written permission, please contact <info_AT_swixml_DOT_org> 5. Products derived from this software may not be called "Swixml", nor may "Swixml" appear in their name, without prior written permission from the Swixml Project Management. THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE SWIXML PROJECT OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
==================================================================== This software consists of voluntary contributions made by many individuals on behalf of the Swixml Project and was originally created by Wolf Paulus <wolf_AT_swixml_DOT_org>. For more information on the Swixml Project, please see <http://www.swixml.org/>. */ package org.swixml.layoutconverters; import java.awt.LayoutManager; import java.util.StringTokenizer; import org.swixml.Attribute; import org.swixml.LayoutConverter; import org.swixml.converters.Util; import org.w3c.dom.Element; import com.jgoodies.forms.layout.CellConstraints; import com.jgoodies.forms.layout.FormLayout; /** * A layout converter for <code>com.jgoodies.forms.layout.FormLayout</code>. * * <p><b>Examples:</b></p> * <pre> * &lt;panel&gt; * &lt;layout type="FormLayout" * columns="p, 3dlu, p:grow" * rows="p, 3dlu, p"/&gt; * * &lt;label constraints="1,1" text="Company"/&gt; * &lt;textfield constraints="3,1"/&gt; * &lt;label constraints="1,3" text="Contact"/&gt; * &lt;textfield constraints="3,3"/&gt; * &lt;/panel&gt; * </pre> * * <pre> * &lt;panel&gt; * &lt;layout type="FormLayout" * columns="right:max(40dlu;pref), 3dlu, d:grow, 7dlu, right:pref, 3dlu, d:grow" * rows="p, 3dlu, p, 9dlu, p, 3dlu, p" * columnGroups="1,5; 3,7"/&gt; * * &lt;label constraints="1,1" text="Company"/&gt; * &lt;textfield constraints="3,1,5,1"/&gt; * &lt;label constraints="1,3" text="Contact"/&gt; * &lt;textfield constraints="3,3,5,1"/&gt; * * &lt;label constraints="1,5" text="PTI [kW]"/&gt; * &lt;textfield constraints="3,5"/&gt; * &lt;label constraints="5,5" text="Power [kW]"/&gt; * &lt;textfield constraints="7,5"/&gt; * &lt;label constraints="1,7" text="R [mm]"/&gt; * &lt;textfield constraints="3,7"/&gt; * &lt;label constraints="5,7" text="D [mm]"/&gt; * &lt;textfield constraints="7,7"/&gt; * &lt;/panel&gt; * </pre> * * @author Karl Tauber */ public class FormLayoutConverter implements LayoutConverter { /** * Returns "formlayout". 
*/
    public String getID() {
        return "formlayout";
    }

    /**
     * Always returns <code>null</code>: a FormLayout cannot be described by a
     * single attribute value, only by a <code>&lt;layout&gt;</code> element.
     */
    public LayoutManager convertLayoutAttribute( final Attribute attr ) {
        return null;
    }

    /**
     * <p>Builds a <code>FormLayout</code> from a <code>&lt;layout&gt;</code> element.</p>
     *
     * <p><b>Attributes:</b></p>
     * <ul>
     * <li><code>columns</code> (required): JGoodies FormLayout column specifications.</li>
     * <li><code>rows</code> (required): JGoodies FormLayout row specifications.</li>
     * <li><code>columnGroups</code> (optional): column groups; every column in a
     * group receives the same group-wide width. Groups are separated by
     * semicolons, column indices inside a group by commas, e.g. "1,5; 3,7,9".
     * Column indices are 1-based.</li>
     * <li><code>rowGroups</code> (optional): row groups; every row in a group
     * receives the same group-wide height. Same syntax as column groups.
     * Row indices are 1-based.</li>
     * </ul>
     *
     * <p><b>Examples for Valid XML element notations:</b></p>
     * <ul>
     * <li><code>&lt;layout type="FormLayout" columns="p, 3dlu, p" rows="p, 3dlu, p"/&gt;</code></li>
     * <li><code>&lt;layout type="FormLayout" columns="p, 3dlu, p, 3dlu, p, 3dlu, p" rows="p, 3dlu, p"
     * columnGroups="1,5; 3,7" rowGroups="1,3"/&gt;</code></li>
     * </ul>
     */
    public LayoutManager convertLayoutElement( final Element element ) {
        // Read all four attributes up front, in document order, before
        // constructing the layout.
        final String columnSpecs = Attribute.getAttributeValue( element, "columns" );
        final String rowSpecs = Attribute.getAttributeValue( element, "rows" );
        final int[][] columnGroups = convertGroupIndices( Attribute.getAttributeValue( element, "columnGroups" ) );
        final int[][] rowGroups = convertGroupIndices( Attribute.getAttributeValue( element, "rowGroups" ) );

        final FormLayout layout = new FormLayout( columnSpecs, rowSpecs );
        if (columnGroups != null) {
            layout.setColumnGroups( columnGroups );
        }
        if (rowGroups != null) {
            layout.setRowGroups( rowGroups );
        }
        return layout;
    }

    /**
     * <p>Creates a <code>CellConstraints</code> instance from its attribute value.</p>
     *
     * <p>Accepted value forms (see JGoodies FormLayout for details):</p>
     * <ul>
     * <li><code>"x, y"</code></li>
     * <li><code>"x, y, w, h"</code></li>
     * <li><code>"x, y, hAlign, vAlign"</code></li>
     * <li><code>"x, y, w, h, hAlign, vAlign"</code></li>
     * </ul>
     *
     * <p><b>Examples for Valid XML attribute notations:</b></p>
     * <ul>
     * <li><code>constraints="1, 3"</code></li>
     * <li><code>constraints="1, 3, 2, 1"</code></li>
     * <li><code>constraints="1, 3, left, bottom"</code></li>
     * <li><code>constraints="1, 3, 2, 1, l, b"</code></li>
     * </ul>
     */
    public Object convertConstraintsAttribute( final Attribute attr ) {
        return new CellConstraints( attr.getValue() );
    }

    /**
     * Always returns <code>null</code>: constraints are given as attributes,
     * never as nested elements.
     */
    public Object convertConstraintsElement( final Element element ) {
        return null;
    }

    /**
     * Parses a group specification such as "1,5; 3,7,9" into an array of index
     * arrays, or returns <code>null</code> when no specification was given.
     */
    private int[][] convertGroupIndices( final String groups ) {
        if (groups == null) {
            return null;
        }

        final StringTokenizer st = new StringTokenizer( groups, ";" );
        final int[][] groupIndices = new int[st.countTokens()][];
        // One group per semicolon-separated token; Util.ia() converts the
        // comma-separated indices of a group into an int[].
        for (int i = 0; st.hasMoreTokens(); i++) {
            groupIndices[i] = Util.ia(new StringTokenizer( st.nextToken(), "," ));
        }
        return groupIndices;
    }
}
boyjimeking/paintown
editor/src/swixml_220/src/org/swixml/layoutconverters/FormLayoutConverter.java
Java
bsd-3-clause
7,909
/* * Copyright 1999-2021 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ #include <stdio.h> #include "internal/cryptlib.h" #include <openssl/evp.h> #include <openssl/core.h> #include <openssl/core_names.h> #include <openssl/pkcs12.h> #include <openssl/x509.h> #include "crypto/evp.h" #include "evp_local.h" /* Password based encryption (PBE) functions */ /* Setup a cipher context from a PBE algorithm */ struct evp_pbe_st { int pbe_type; int pbe_nid; int cipher_nid; int md_nid; EVP_PBE_KEYGEN *keygen; EVP_PBE_KEYGEN_EX *keygen_ex; }; static STACK_OF(EVP_PBE_CTL) *pbe_algs; static const EVP_PBE_CTL builtin_pbe[] = { {EVP_PBE_TYPE_OUTER, NID_pbeWithMD2AndDES_CBC, NID_des_cbc, NID_md2, PKCS5_PBE_keyivgen, PKCS5_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbeWithMD5AndDES_CBC, NID_des_cbc, NID_md5, PKCS5_PBE_keyivgen, PKCS5_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbeWithSHA1AndRC2_CBC, NID_rc2_64_cbc, NID_sha1, PKCS5_PBE_keyivgen, PKCS5_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_id_pbkdf2, -1, -1, PKCS5_v2_PBKDF2_keyivgen}, {EVP_PBE_TYPE_OUTER, NID_pbe_WithSHA1And128BitRC4, NID_rc4, NID_sha1, PKCS12_PBE_keyivgen, &PKCS12_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbe_WithSHA1And40BitRC4, NID_rc4_40, NID_sha1, PKCS12_PBE_keyivgen, &PKCS12_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbe_WithSHA1And3_Key_TripleDES_CBC, NID_des_ede3_cbc, NID_sha1, PKCS12_PBE_keyivgen, &PKCS12_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbe_WithSHA1And2_Key_TripleDES_CBC, NID_des_ede_cbc, NID_sha1, PKCS12_PBE_keyivgen, &PKCS12_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbe_WithSHA1And128BitRC2_CBC, NID_rc2_cbc, NID_sha1, PKCS12_PBE_keyivgen, &PKCS12_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbe_WithSHA1And40BitRC2_CBC, NID_rc2_40_cbc, 
NID_sha1, PKCS12_PBE_keyivgen, &PKCS12_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbes2, -1, -1, PKCS5_v2_PBE_keyivgen, &PKCS5_v2_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbeWithMD2AndRC2_CBC, NID_rc2_64_cbc, NID_md2, PKCS5_PBE_keyivgen, PKCS5_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbeWithMD5AndRC2_CBC, NID_rc2_64_cbc, NID_md5, PKCS5_PBE_keyivgen, PKCS5_PBE_keyivgen_ex}, {EVP_PBE_TYPE_OUTER, NID_pbeWithSHA1AndDES_CBC, NID_des_cbc, NID_sha1, PKCS5_PBE_keyivgen, PKCS5_PBE_keyivgen_ex}, {EVP_PBE_TYPE_PRF, NID_hmacWithSHA1, -1, NID_sha1, 0}, {EVP_PBE_TYPE_PRF, NID_hmac_md5, -1, NID_md5, 0}, {EVP_PBE_TYPE_PRF, NID_hmac_sha1, -1, NID_sha1, 0}, {EVP_PBE_TYPE_PRF, NID_hmacWithMD5, -1, NID_md5, 0}, {EVP_PBE_TYPE_PRF, NID_hmacWithSHA224, -1, NID_sha224, 0}, {EVP_PBE_TYPE_PRF, NID_hmacWithSHA256, -1, NID_sha256, 0}, {EVP_PBE_TYPE_PRF, NID_hmacWithSHA384, -1, NID_sha384, 0}, {EVP_PBE_TYPE_PRF, NID_hmacWithSHA512, -1, NID_sha512, 0}, {EVP_PBE_TYPE_PRF, NID_id_HMACGostR3411_94, -1, NID_id_GostR3411_94, 0}, {EVP_PBE_TYPE_PRF, NID_id_tc26_hmac_gost_3411_2012_256, -1, NID_id_GostR3411_2012_256, 0}, {EVP_PBE_TYPE_PRF, NID_id_tc26_hmac_gost_3411_2012_512, -1, NID_id_GostR3411_2012_512, 0}, {EVP_PBE_TYPE_PRF, NID_hmacWithSHA512_224, -1, NID_sha512_224, 0}, {EVP_PBE_TYPE_PRF, NID_hmacWithSHA512_256, -1, NID_sha512_256, 0}, {EVP_PBE_TYPE_KDF, NID_id_pbkdf2, -1, -1, PKCS5_v2_PBKDF2_keyivgen, &PKCS5_v2_PBKDF2_keyivgen_ex}, #ifndef OPENSSL_NO_SCRYPT {EVP_PBE_TYPE_KDF, NID_id_scrypt, -1, -1, PKCS5_v2_scrypt_keyivgen, &PKCS5_v2_scrypt_keyivgen_ex} #endif }; int EVP_PBE_CipherInit_ex(ASN1_OBJECT *pbe_obj, const char *pass, int passlen, ASN1_TYPE *param, EVP_CIPHER_CTX *ctx, int en_de, OSSL_LIB_CTX *libctx, const char *propq) { const EVP_CIPHER *cipher = NULL; EVP_CIPHER *cipher_fetch = NULL; const EVP_MD *md = NULL; EVP_MD *md_fetch = NULL; int ret = 0, cipher_nid, md_nid; EVP_PBE_KEYGEN_EX *keygen_ex; EVP_PBE_KEYGEN *keygen; if (!EVP_PBE_find_ex(EVP_PBE_TYPE_OUTER, 
OBJ_obj2nid(pbe_obj), &cipher_nid, &md_nid, &keygen, &keygen_ex)) { char obj_tmp[80]; if (pbe_obj == NULL) OPENSSL_strlcpy(obj_tmp, "NULL", sizeof(obj_tmp)); else i2t_ASN1_OBJECT(obj_tmp, sizeof(obj_tmp), pbe_obj); ERR_raise_data(ERR_LIB_EVP, EVP_R_UNKNOWN_PBE_ALGORITHM, "TYPE=%s", obj_tmp); goto err; } if (pass == NULL) passlen = 0; else if (passlen == -1) passlen = strlen(pass); if (cipher_nid != -1) { (void)ERR_set_mark(); cipher = cipher_fetch = EVP_CIPHER_fetch(libctx, OBJ_nid2sn(cipher_nid), propq); /* Fallback to legacy method */ if (cipher == NULL) cipher = EVP_get_cipherbynid(cipher_nid); if (cipher == NULL) { (void)ERR_clear_last_mark(); ERR_raise_data(ERR_LIB_EVP, EVP_R_UNKNOWN_CIPHER, OBJ_nid2sn(cipher_nid)); goto err; } (void)ERR_pop_to_mark(); } if (md_nid != -1) { (void)ERR_set_mark(); md = md_fetch = EVP_MD_fetch(libctx, OBJ_nid2sn(md_nid), propq); /* Fallback to legacy method */ if (md == NULL) EVP_get_digestbynid(md_nid); if (md == NULL) { (void)ERR_clear_last_mark(); ERR_raise(ERR_LIB_EVP, EVP_R_UNKNOWN_DIGEST); goto err; } (void)ERR_pop_to_mark(); } /* Try extended keygen with libctx/propq first, fall back to legacy keygen */ if (keygen_ex != NULL) ret = keygen_ex(ctx, pass, passlen, param, cipher, md, en_de, libctx, propq); else ret = keygen(ctx, pass, passlen, param, cipher, md, en_de); err: EVP_CIPHER_free(cipher_fetch); EVP_MD_free(md_fetch); return ret; } int EVP_PBE_CipherInit(ASN1_OBJECT *pbe_obj, const char *pass, int passlen, ASN1_TYPE *param, EVP_CIPHER_CTX *ctx, int en_de) { return EVP_PBE_CipherInit_ex(pbe_obj, pass, passlen, param, ctx, en_de, NULL, NULL); } DECLARE_OBJ_BSEARCH_CMP_FN(EVP_PBE_CTL, EVP_PBE_CTL, pbe2); static int pbe2_cmp(const EVP_PBE_CTL *pbe1, const EVP_PBE_CTL *pbe2) { int ret = pbe1->pbe_type - pbe2->pbe_type; if (ret) return ret; else return pbe1->pbe_nid - pbe2->pbe_nid; } IMPLEMENT_OBJ_BSEARCH_CMP_FN(EVP_PBE_CTL, EVP_PBE_CTL, pbe2); static int pbe_cmp(const EVP_PBE_CTL *const *a, const EVP_PBE_CTL *const *b) { 
int ret = (*a)->pbe_type - (*b)->pbe_type; if (ret) return ret; else return (*a)->pbe_nid - (*b)->pbe_nid; } /* Add a PBE algorithm */ int EVP_PBE_alg_add_type(int pbe_type, int pbe_nid, int cipher_nid, int md_nid, EVP_PBE_KEYGEN *keygen) { EVP_PBE_CTL *pbe_tmp; if (pbe_algs == NULL) { pbe_algs = sk_EVP_PBE_CTL_new(pbe_cmp); if (pbe_algs == NULL) goto err; } if ((pbe_tmp = OPENSSL_malloc(sizeof(*pbe_tmp))) == NULL) goto err; pbe_tmp->pbe_type = pbe_type; pbe_tmp->pbe_nid = pbe_nid; pbe_tmp->cipher_nid = cipher_nid; pbe_tmp->md_nid = md_nid; pbe_tmp->keygen = keygen; if (!sk_EVP_PBE_CTL_push(pbe_algs, pbe_tmp)) { OPENSSL_free(pbe_tmp); goto err; } return 1; err: ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE); return 0; } int EVP_PBE_alg_add(int nid, const EVP_CIPHER *cipher, const EVP_MD *md, EVP_PBE_KEYGEN *keygen) { int cipher_nid, md_nid; if (cipher) cipher_nid = EVP_CIPHER_get_nid(cipher); else cipher_nid = -1; if (md) md_nid = EVP_MD_get_type(md); else md_nid = -1; return EVP_PBE_alg_add_type(EVP_PBE_TYPE_OUTER, nid, cipher_nid, md_nid, keygen); } int EVP_PBE_find_ex(int type, int pbe_nid, int *pcnid, int *pmnid, EVP_PBE_KEYGEN **pkeygen, EVP_PBE_KEYGEN_EX **pkeygen_ex) { EVP_PBE_CTL *pbetmp = NULL, pbelu; int i; if (pbe_nid == NID_undef) return 0; pbelu.pbe_type = type; pbelu.pbe_nid = pbe_nid; if (pbe_algs != NULL) { i = sk_EVP_PBE_CTL_find(pbe_algs, &pbelu); pbetmp = sk_EVP_PBE_CTL_value(pbe_algs, i); } if (pbetmp == NULL) { pbetmp = OBJ_bsearch_pbe2(&pbelu, builtin_pbe, OSSL_NELEM(builtin_pbe)); } if (pbetmp == NULL) return 0; if (pcnid != NULL) *pcnid = pbetmp->cipher_nid; if (pmnid != NULL) *pmnid = pbetmp->md_nid; if (pkeygen != NULL) *pkeygen = pbetmp->keygen; if (pkeygen_ex != NULL) *pkeygen_ex = pbetmp->keygen_ex; return 1; } int EVP_PBE_find(int type, int pbe_nid, int *pcnid, int *pmnid, EVP_PBE_KEYGEN **pkeygen) { return EVP_PBE_find_ex(type, pbe_nid, pcnid, pmnid, pkeygen, NULL); } static void free_evp_pbe_ctl(EVP_PBE_CTL *pbe) { OPENSSL_free(pbe); } 
void EVP_PBE_cleanup(void) { sk_EVP_PBE_CTL_pop_free(pbe_algs, free_evp_pbe_ctl); pbe_algs = NULL; } int EVP_PBE_get(int *ptype, int *ppbe_nid, size_t num) { const EVP_PBE_CTL *tpbe; if (num >= OSSL_NELEM(builtin_pbe)) return 0; tpbe = builtin_pbe + num; if (ptype) *ptype = tpbe->pbe_type; if (ppbe_nid) *ppbe_nid = tpbe->pbe_nid; return 1; }
jens-maus/amissl
openssl/crypto/evp/evp_pbe.c
C
bsd-3-clause
9,652
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <title>Test: width:49%; height:49%; width="70" height="10%"</title> </head> <body> <p>You should see a blue rectangle 49% wide and 49% high when viewing this file. Image size is 500 x 500.</p> <p><img style="border:1px solid #000" src="standalone_svgs/standalone--pct-pct--px-pct.svg" alt="empty svg" width="500" height="500" /></p> </body> </html>
frivoal/presto-testo
SVG/Testsuites/Mozilla/Sizing/pct-pct--px-pct.html
HTML
bsd-3-clause
539
package org.hisp.dhis.organisationunit.comparator; /* * Copyright (c) 2004-2015, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import org.hisp.dhis.organisationunit.OrganisationUnit; import java.util.Comparator; /** * @author Morten Olav Hansen <mortenoh@gmail.com> */ public class OrganisationUnitComparator implements Comparator<OrganisationUnit> { private int countParents( OrganisationUnit organisationUnit ) { int parents = 0; OrganisationUnit currentOrganisationUnit = organisationUnit; while ( (currentOrganisationUnit = currentOrganisationUnit.getParent()) != null ) { parents++; } return parents; } @Override public int compare( OrganisationUnit organisationUnit1, OrganisationUnit organisationUnit2 ) { Integer parents1 = countParents( organisationUnit1 ); Integer parents2 = countParents( organisationUnit2 ); return parents1.compareTo( parents2 ); } }
steffeli/inf5750-tracker-capture
dhis-api/src/main/java/org/hisp/dhis/organisationunit/comparator/OrganisationUnitComparator.java
Java
bsd-3-clause
2,471
@import import_self.c; #include "import-self-d.h" // FIXME: This should not work; names from 'a' should not be visible here. MyTypeA import_self_test_a; // FIXME: This should work but does not; names from 'b' are not actually visible here. //MyTypeC import_self_test_c; MyTypeD import_self_test_d;
santoshn/softboundcets-34
softboundcets-llvm-clang34/tools/clang/test/Modules/Inputs/submodules/import-self-b.h
C
bsd-3-clause
301
/*! \file Copyright (c) 2003, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from U.S. Dept. of Energy) All rights reserved. The source code is distributed under BSD license, see the file License.txt at the top-level directory. */ /*! @file dmemory.c * \brief Memory details * * <pre> * -- SuperLU routine (version 4.0) -- * Lawrence Berkeley National Laboratory. * June 30, 2009 * </pre> */ #include "slu_ddefs.h" /* Internal prototypes */ void *dexpand (int *, MemType,int, int, GlobalLU_t *); int dLUWorkInit (int, int, int, int **, double **, GlobalLU_t *); void copy_mem_double (int, void *, void *); void dStackCompress (GlobalLU_t *); void dSetupSpace (void *, int, GlobalLU_t *); void *duser_malloc (int, int, GlobalLU_t *); void duser_free (int, int, GlobalLU_t *); /* External prototypes (in memory.c - prec-independent) */ extern void copy_mem_int (int, void *, void *); extern void user_bcopy (char *, char *, int); /* Macros to manipulate stack */ #define StackFull(x) ( x + Glu->stack.used >= Glu->stack.size ) #define NotDoubleAlign(addr) ( (intptr_t)addr & 7 ) #define DoubleAlign(addr) ( ((intptr_t)addr + 7) & ~7L ) #define TempSpace(m, w) ( (2*w + 4 + NO_MARKER) * m * sizeof(int) + \ (w + 1) * m * sizeof(double) ) #define Reduce(alpha) ((alpha + 1) / 2) /* i.e. (alpha-1)/2 + 1 */ /*! \brief Setup the memory model to be used for factorization. * * lwork = 0: use system malloc; * lwork > 0: use user-supplied work[] space. 
*/ void dSetupSpace(void *work, int lwork, GlobalLU_t *Glu) { if ( lwork == 0 ) { Glu->MemModel = SYSTEM; /* malloc/free */ } else if ( lwork > 0 ) { Glu->MemModel = USER; /* user provided space */ Glu->stack.used = 0; Glu->stack.top1 = 0; Glu->stack.top2 = (lwork/4)*4; /* must be word addressable */ Glu->stack.size = Glu->stack.top2; Glu->stack.array = (void *) work; } } void *duser_malloc(int bytes, int which_end, GlobalLU_t *Glu) { void *buf; if ( StackFull(bytes) ) return (NULL); if ( which_end == HEAD ) { buf = (char*) Glu->stack.array + Glu->stack.top1; Glu->stack.top1 += bytes; } else { Glu->stack.top2 -= bytes; buf = (char*) Glu->stack.array + Glu->stack.top2; } Glu->stack.used += bytes; return buf; } void duser_free(int bytes, int which_end, GlobalLU_t *Glu) { if ( which_end == HEAD ) { Glu->stack.top1 -= bytes; } else { Glu->stack.top2 += bytes; } Glu->stack.used -= bytes; } /*! \brief * * <pre> * mem_usage consists of the following fields: * - for_lu (float) * The amount of space used in bytes for the L\U data structures. * - total_needed (float) * The amount of space needed in bytes to perform factorization. * </pre> */ int dQuerySpace(SuperMatrix *L, SuperMatrix *U, mem_usage_t *mem_usage) { SCformat *Lstore; NCformat *Ustore; register int n, iword, dword, panel_size = sp_ienv(1); Lstore = L->Store; Ustore = U->Store; n = L->ncol; iword = sizeof(int); dword = sizeof(double); /* For LU factors */ mem_usage->for_lu = (float)( (4.0*n + 3.0) * iword + Lstore->nzval_colptr[n] * dword + Lstore->rowind_colptr[n] * iword ); mem_usage->for_lu += (float)( (n + 1.0) * iword + Ustore->colptr[n] * (dword + iword) ); /* Working storage to support factorization */ mem_usage->total_needed = mem_usage->for_lu + (float)( (2.0 * panel_size + 4.0 + NO_MARKER) * n * iword + (panel_size + 1.0) * n * dword ); return 0; } /* dQuerySpace */ /*! 
\brief * * <pre> * mem_usage consists of the following fields: * - for_lu (float) * The amount of space used in bytes for the L\U data structures. * - total_needed (float) * The amount of space needed in bytes to perform factorization. * </pre> */ int ilu_dQuerySpace(SuperMatrix *L, SuperMatrix *U, mem_usage_t *mem_usage) { SCformat *Lstore; NCformat *Ustore; register int n, panel_size = sp_ienv(1); register float iword, dword; Lstore = L->Store; Ustore = U->Store; n = L->ncol; iword = sizeof(int); dword = sizeof(double); /* For LU factors */ mem_usage->for_lu = (float)( (4.0f * n + 3.0f) * iword + Lstore->nzval_colptr[n] * dword + Lstore->rowind_colptr[n] * iword ); mem_usage->for_lu += (float)( (n + 1.0f) * iword + Ustore->colptr[n] * (dword + iword) ); /* Working storage to support factorization. ILU needs 5*n more integers than LU */ mem_usage->total_needed = mem_usage->for_lu + (float)( (2.0f * panel_size + 9.0f + NO_MARKER) * n * iword + (panel_size + 1.0f) * n * dword ); return 0; } /* ilu_dQuerySpace */ /*! \brief Allocate storage for the data structures common to all factor routines. * * <pre> * For those unpredictable size, estimate as fill_ratio * nnz(A). * Return value: * If lwork = -1, return the estimated amount of space required, plus n; * otherwise, return the amount of space actually allocated when * memory allocation failure occurred. 
* </pre> */ int dLUMemInit(fact_t fact, void *work, int lwork, int m, int n, int annz, int panel_size, double fill_ratio, SuperMatrix *L, SuperMatrix *U, GlobalLU_t *Glu, int **iwork, double **dwork) { int info, iword, dword; SCformat *Lstore; NCformat *Ustore; int *xsup, *supno; int *lsub, *xlsub; double *lusup; int *xlusup; double *ucol; int *usub, *xusub; int nzlmax, nzumax, nzlumax; iword = sizeof(int); dword = sizeof(double); Glu->n = n; Glu->num_expansions = 0; Glu->expanders = (ExpHeader *) SUPERLU_MALLOC( NO_MEMTYPE * sizeof(ExpHeader) ); if ( !Glu->expanders ) ABORT("SUPERLU_MALLOC fails for expanders"); if ( fact != SamePattern_SameRowPerm ) { /* Guess for L\U factors */ nzumax = nzlumax = fill_ratio * annz; nzlmax = SUPERLU_MAX(1, fill_ratio/4.) * annz; if ( lwork == -1 ) { return ( GluIntArray(n) * iword + TempSpace(m, panel_size) + (nzlmax+nzumax)*iword + (nzlumax+nzumax)*dword + n ); } else { dSetupSpace(work, lwork, Glu); } #if ( PRNTlevel >= 1 ) printf("dLUMemInit() called: fill_ratio %.0f, nzlmax %ld, nzumax %ld\n", fill_ratio, nzlmax, nzumax); fflush(stdout); #endif /* Integer pointers for L\U factors */ if ( Glu->MemModel == SYSTEM ) { xsup = intMalloc(n+1); supno = intMalloc(n+1); xlsub = intMalloc(n+1); xlusup = intMalloc(n+1); xusub = intMalloc(n+1); } else { xsup = (int *)duser_malloc((n+1) * iword, HEAD, Glu); supno = (int *)duser_malloc((n+1) * iword, HEAD, Glu); xlsub = (int *)duser_malloc((n+1) * iword, HEAD, Glu); xlusup = (int *)duser_malloc((n+1) * iword, HEAD, Glu); xusub = (int *)duser_malloc((n+1) * iword, HEAD, Glu); } lusup = (double *) dexpand( &nzlumax, LUSUP, 0, 0, Glu ); ucol = (double *) dexpand( &nzumax, UCOL, 0, 0, Glu ); lsub = (int *) dexpand( &nzlmax, LSUB, 0, 0, Glu ); usub = (int *) dexpand( &nzumax, USUB, 0, 1, Glu ); while ( !lusup || !ucol || !lsub || !usub ) { if ( Glu->MemModel == SYSTEM ) { SUPERLU_FREE(lusup); SUPERLU_FREE(ucol); SUPERLU_FREE(lsub); SUPERLU_FREE(usub); } else { 
duser_free((nzlumax+nzumax)*dword+(nzlmax+nzumax)*iword, HEAD, Glu); } nzlumax /= 2; nzumax /= 2; nzlmax /= 2; if ( nzlumax < annz ) { printf("Not enough memory to perform factorization.\n"); return (dmemory_usage(nzlmax, nzumax, nzlumax, n) + n); } #if ( PRNTlevel >= 1) printf("dLUMemInit() reduce size: nzlmax %ld, nzumax %ld\n", nzlmax, nzumax); fflush(stdout); #endif lusup = (double *) dexpand( &nzlumax, LUSUP, 0, 0, Glu ); ucol = (double *) dexpand( &nzumax, UCOL, 0, 0, Glu ); lsub = (int *) dexpand( &nzlmax, LSUB, 0, 0, Glu ); usub = (int *) dexpand( &nzumax, USUB, 0, 1, Glu ); } } else { /* fact == SamePattern_SameRowPerm */ Lstore = L->Store; Ustore = U->Store; xsup = Lstore->sup_to_col; supno = Lstore->col_to_sup; xlsub = Lstore->rowind_colptr; xlusup = Lstore->nzval_colptr; xusub = Ustore->colptr; nzlmax = Glu->nzlmax; /* max from previous factorization */ nzumax = Glu->nzumax; nzlumax = Glu->nzlumax; if ( lwork == -1 ) { return ( GluIntArray(n) * iword + TempSpace(m, panel_size) + (nzlmax+nzumax)*iword + (nzlumax+nzumax)*dword + n ); } else if ( lwork == 0 ) { Glu->MemModel = SYSTEM; } else { Glu->MemModel = USER; Glu->stack.top2 = (lwork/4)*4; /* must be word-addressable */ Glu->stack.size = Glu->stack.top2; } lsub = Glu->expanders[LSUB].mem = Lstore->rowind; lusup = Glu->expanders[LUSUP].mem = Lstore->nzval; usub = Glu->expanders[USUB].mem = Ustore->rowind; ucol = Glu->expanders[UCOL].mem = Ustore->nzval;; Glu->expanders[LSUB].size = nzlmax; Glu->expanders[LUSUP].size = nzlumax; Glu->expanders[USUB].size = nzumax; Glu->expanders[UCOL].size = nzumax; } Glu->xsup = xsup; Glu->supno = supno; Glu->lsub = lsub; Glu->xlsub = xlsub; Glu->lusup = (void *) lusup; Glu->xlusup = xlusup; Glu->ucol = (void *) ucol; Glu->usub = usub; Glu->xusub = xusub; Glu->nzlmax = nzlmax; Glu->nzumax = nzumax; Glu->nzlumax = nzlumax; info = dLUWorkInit(m, n, panel_size, iwork, dwork, Glu); if ( info ) return ( info + dmemory_usage(nzlmax, nzumax, nzlumax, n) + n); 
++Glu->num_expansions; return 0; } /* dLUMemInit */ /*! \brief Allocate known working storage. Returns 0 if success, otherwise returns the number of bytes allocated so far when failure occurred. */ int dLUWorkInit(int m, int n, int panel_size, int **iworkptr, double **dworkptr, GlobalLU_t *Glu) { int isize, dsize, extra; double *old_ptr; int maxsuper = SUPERLU_MAX( sp_ienv(3), sp_ienv(7) ), rowblk = sp_ienv(4); isize = ( (2 * panel_size + 3 + NO_MARKER ) * m + n ) * sizeof(int); dsize = (m * panel_size + NUM_TEMPV(m,panel_size,maxsuper,rowblk)) * sizeof(double); if ( Glu->MemModel == SYSTEM ) *iworkptr = (int *) intCalloc(isize/sizeof(int)); else *iworkptr = (int *) duser_malloc(isize, TAIL, Glu); if ( ! *iworkptr ) { fprintf(stderr, "dLUWorkInit: malloc fails for local iworkptr[]\n"); return (isize + n); } if ( Glu->MemModel == SYSTEM ) *dworkptr = (double *) SUPERLU_MALLOC(dsize); else { *dworkptr = (double *) duser_malloc(dsize, TAIL, Glu); if ( NotDoubleAlign(*dworkptr) ) { old_ptr = *dworkptr; *dworkptr = (double*) DoubleAlign(*dworkptr); *dworkptr = (double*) ((double*)*dworkptr - 1); extra = (char*)old_ptr - (char*)*dworkptr; #ifdef DEBUG printf("dLUWorkInit: not aligned, extra %d\n", extra); #endif Glu->stack.top2 -= extra; Glu->stack.used += extra; } } if ( ! *dworkptr ) { fprintf(stderr, "malloc fails for local dworkptr[]."); return (isize + dsize + n); } return 0; } /*! \brief Set up pointers for real working arrays. */ void dSetRWork(int m, int panel_size, double *dworkptr, double **dense, double **tempv) { double zero = 0.0; int maxsuper = SUPERLU_MAX( sp_ienv(3), sp_ienv(7) ), rowblk = sp_ienv(4); *dense = dworkptr; *tempv = *dense + panel_size*m; dfill (*dense, m * panel_size, zero); dfill (*tempv, NUM_TEMPV(m,panel_size,maxsuper,rowblk), zero); } /*! \brief Free the working storage used by factor routines. 
*/ void dLUWorkFree(int *iwork, double *dwork, GlobalLU_t *Glu) { if ( Glu->MemModel == SYSTEM ) { SUPERLU_FREE (iwork); SUPERLU_FREE (dwork); } else { Glu->stack.used -= (Glu->stack.size - Glu->stack.top2); Glu->stack.top2 = Glu->stack.size; /* dStackCompress(Glu); */ } SUPERLU_FREE (Glu->expanders); Glu->expanders = NULL; } /*! \brief Expand the data structures for L and U during the factorization. * * <pre> * Return value: 0 - successful return * > 0 - number of bytes allocated when run out of space * </pre> */ int dLUMemXpand(int jcol, int next, /* number of elements currently in the factors */ MemType mem_type, /* which type of memory to expand */ int *maxlen, /* modified - maximum length of a data structure */ GlobalLU_t *Glu /* modified - global LU data structures */ ) { void *new_mem; #ifdef DEBUG printf("dLUMemXpand(): jcol %d, next %d, maxlen %d, MemType %d\n", jcol, next, *maxlen, mem_type); #endif if (mem_type == USUB) new_mem = dexpand(maxlen, mem_type, next, 1, Glu); else new_mem = dexpand(maxlen, mem_type, next, 0, Glu); if ( !new_mem ) { int nzlmax = Glu->nzlmax; int nzumax = Glu->nzumax; int nzlumax = Glu->nzlumax; fprintf(stderr, "Can't expand MemType %d: jcol %d\n", mem_type, jcol); return (dmemory_usage(nzlmax, nzumax, nzlumax, Glu->n) + Glu->n); } switch ( mem_type ) { case LUSUP: Glu->lusup = (void *) new_mem; Glu->nzlumax = *maxlen; break; case UCOL: Glu->ucol = (void *) new_mem; Glu->nzumax = *maxlen; break; case LSUB: Glu->lsub = (int *) new_mem; Glu->nzlmax = *maxlen; break; case USUB: Glu->usub = (int *) new_mem; Glu->nzumax = *maxlen; break; } return 0; } void copy_mem_double(int howmany, void *old, void *new) { register int i; double *dold = old; double *dnew = new; for (i = 0; i < howmany; i++) dnew[i] = dold[i]; } /*! \brief Expand the existing storage to accommodate more fill-ins. 
*/ void *dexpand ( int *prev_len, /* length used from previous call */ MemType type, /* which part of the memory to expand */ int len_to_copy, /* size of the memory to be copied to new store */ int keep_prev, /* = 1: use prev_len; = 0: compute new_len to expand */ GlobalLU_t *Glu /* modified - global LU data structures */ ) { float EXPAND = 1.5; float alpha; void *new_mem, *old_mem; int new_len, tries, lword, extra, bytes_to_copy; ExpHeader *expanders = Glu->expanders; /* Array of 4 types of memory */ alpha = EXPAND; if ( Glu->num_expansions == 0 || keep_prev ) { /* First time allocate requested */ new_len = *prev_len; } else { new_len = alpha * *prev_len; } if ( type == LSUB || type == USUB ) lword = sizeof(int); else lword = sizeof(double); if ( Glu->MemModel == SYSTEM ) { new_mem = (void *) SUPERLU_MALLOC((size_t)new_len * lword); if ( Glu->num_expansions != 0 ) { tries = 0; if ( keep_prev ) { if ( !new_mem ) return (NULL); } else { while ( !new_mem ) { if ( ++tries > 10 ) return (NULL); alpha = Reduce(alpha); new_len = alpha * *prev_len; new_mem = (void *) SUPERLU_MALLOC((size_t)new_len * lword); } } if ( type == LSUB || type == USUB ) { copy_mem_int(len_to_copy, expanders[type].mem, new_mem); } else { copy_mem_double(len_to_copy, expanders[type].mem, new_mem); } SUPERLU_FREE (expanders[type].mem); } expanders[type].mem = (void *) new_mem; } else { /* MemModel == USER */ if ( Glu->num_expansions == 0 ) { new_mem = duser_malloc(new_len * lword, HEAD, Glu); if ( NotDoubleAlign(new_mem) && (type == LUSUP || type == UCOL) ) { old_mem = new_mem; new_mem = (void *)DoubleAlign(new_mem); extra = (char*)new_mem - (char*)old_mem; #ifdef DEBUG printf("expand(): not aligned, extra %d\n", extra); #endif Glu->stack.top1 += extra; Glu->stack.used += extra; } expanders[type].mem = (void *) new_mem; } else { tries = 0; extra = (new_len - *prev_len) * lword; if ( keep_prev ) { if ( StackFull(extra) ) return (NULL); } else { while ( StackFull(extra) ) { if ( ++tries > 10 ) return 
(NULL); alpha = Reduce(alpha); new_len = alpha * *prev_len; extra = (new_len - *prev_len) * lword; } } if ( type != USUB ) { new_mem = (void*)((char*)expanders[type + 1].mem + extra); bytes_to_copy = (char*)Glu->stack.array + Glu->stack.top1 - (char*)expanders[type + 1].mem; user_bcopy(expanders[type+1].mem, new_mem, bytes_to_copy); if ( type < USUB ) { Glu->usub = expanders[USUB].mem = (void*)((char*)expanders[USUB].mem + extra); } if ( type < LSUB ) { Glu->lsub = expanders[LSUB].mem = (void*)((char*)expanders[LSUB].mem + extra); } if ( type < UCOL ) { Glu->ucol = expanders[UCOL].mem = (void*)((char*)expanders[UCOL].mem + extra); } Glu->stack.top1 += extra; Glu->stack.used += extra; if ( type == UCOL ) { Glu->stack.top1 += extra; /* Add same amount for USUB */ Glu->stack.used += extra; } } /* if ... */ } /* else ... */ } expanders[type].size = new_len; *prev_len = new_len; if ( Glu->num_expansions ) ++Glu->num_expansions; return (void *) expanders[type].mem; } /* dexpand */ /*! \brief Compress the work[] array to remove fragmentation. 
*/ void dStackCompress(GlobalLU_t *Glu) { register int iword, dword, ndim; char *last, *fragment; int *ifrom, *ito; double *dfrom, *dto; int *xlsub, *lsub, *xusub, *usub, *xlusup; double *ucol, *lusup; iword = sizeof(int); dword = sizeof(double); ndim = Glu->n; xlsub = Glu->xlsub; lsub = Glu->lsub; xusub = Glu->xusub; usub = Glu->usub; xlusup = Glu->xlusup; ucol = Glu->ucol; lusup = Glu->lusup; dfrom = ucol; dto = (double *)((char*)lusup + xlusup[ndim] * dword); copy_mem_double(xusub[ndim], dfrom, dto); ucol = dto; ifrom = lsub; ito = (int *) ((char*)ucol + xusub[ndim] * iword); copy_mem_int(xlsub[ndim], ifrom, ito); lsub = ito; ifrom = usub; ito = (int *) ((char*)lsub + xlsub[ndim] * iword); copy_mem_int(xusub[ndim], ifrom, ito); usub = ito; last = (char*)usub + xusub[ndim] * iword; fragment = (char*) (((char*)Glu->stack.array + Glu->stack.top1) - last); Glu->stack.used -= (uintptr_t) fragment; Glu->stack.top1 -= (uintptr_t) fragment; Glu->ucol = ucol; Glu->lsub = lsub; Glu->usub = usub; #ifdef DEBUG printf("dStackCompress: fragment %d\n", fragment); /* for (last = 0; last < ndim; ++last) print_lu_col("After compress:", last, 0);*/ #endif } /*! 
\brief Allocate storage for original matrix A */ void dallocateA(int n, int nnz, double **a, int **asub, int **xa) { *a = (double *) doubleMalloc(nnz); *asub = (int *) intMalloc(nnz); *xa = (int *) intMalloc(n+1); } double *doubleMalloc(size_t n) { double *buf; buf = (double *) SUPERLU_MALLOC(n * (size_t) sizeof(double)); if ( !buf ) { ABORT("SUPERLU_MALLOC failed for buf in doubleMalloc()\n"); } return (buf); } double *doubleCalloc(size_t n) { double *buf; register size_t i; double zero = 0.0; buf = (double *) SUPERLU_MALLOC(n * (size_t) sizeof(double)); if ( !buf ) { ABORT("SUPERLU_MALLOC failed for buf in doubleCalloc()\n"); } for (i = 0; i < n; ++i) buf[i] = zero; return (buf); } int dmemory_usage(const int nzlmax, const int nzumax, const int nzlumax, const int n) { register int iword, dword; iword = sizeof(int); dword = sizeof(double); return (10 * n * iword + nzlmax * iword + nzumax * (iword + dword) + nzlumax * dword); }
scipy/scipy
scipy/sparse/linalg/_dsolve/SuperLU/SRC/dmemory.c
C
bsd-3-clause
19,820
// Example program for the linear_least_square_fitting function // on a set of 3D triangles #include <CGAL/Simple_cartesian.h> #include <CGAL/linear_least_squares_fitting_3.h> #include <vector> typedef double FT; typedef CGAL::Simple_cartesian<FT> K; typedef K::Line_3 Line; typedef K::Plane_3 Plane; typedef K::Point_3 Point; typedef K::Triangle_3 Triangle; int main(void) { std::vector<Triangle> triangles; Point a(1.0,2.0,3.0); Point b(4.0,0.0,6.0); Point c(7.0,8.0,9.0); Point d(8.0,7.0,6.0); Point e(5.0,3.0,4.0); triangles.push_back(Triangle(a,b,c)); triangles.push_back(Triangle(a,b,d)); triangles.push_back(Triangle(d,e,c)); Line line; Plane plane; // fit plane to whole triangles linear_least_squares_fitting_3(triangles.begin(),triangles.end(),plane,CGAL::Dimension_tag<2>()); // fit line to triangle vertices linear_least_squares_fitting_3(triangles.begin(),triangles.end(),line, CGAL::Dimension_tag<0>()); return 0; }
hlzz/dotfiles
graphics/cgal/Principal_component_analysis/examples/Principal_component_analysis/linear_least_squares_fitting_triangles_3.cpp
C++
bsd-3-clause
1,060
/***************************************************************************** Copyright (c) 2014, Intel Corp. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*****************************************************************************
* Contents: Native high-level C interface to LAPACK function zsytrf_aa_2stage
* Author: Intel Corporation
* Generated November 2017
*****************************************************************************/
#include "lapacke_utils.h"

/* High-level wrapper for zsytrf_aa_2stage: factors the complex symmetric
 * matrix A using Aasen's two-stage algorithm.  Performs the standard
 * LAPACKE dance: validate layout, optional NaN checks, workspace-size
 * query (lwork = -1), allocate, call the middle-level routine, free. */
lapack_int LAPACKE_zsytrf_aa_2stage( int matrix_layout, char uplo, lapack_int n,
                                     lapack_complex_double* a, lapack_int lda,
                                     lapack_complex_double* tb, lapack_int ltb,
                                     lapack_int* ipiv, lapack_int* ipiv2 )
{
    lapack_int info = 0;
    lapack_int lwork = -1;  /* -1 triggers the workspace-size query */
    lapack_complex_double* work = NULL;
    lapack_complex_double work_query;
    if( matrix_layout != LAPACK_COL_MAJOR && matrix_layout != LAPACK_ROW_MAJOR ) {
        LAPACKE_xerbla( "LAPACKE_zsytrf_aa_2stage", -1 );
        return -1;
    }
#ifndef LAPACK_DISABLE_NAN_CHECK
    if( LAPACKE_get_nancheck() ) {
        /* Optionally check input matrices for NaNs */
        if( LAPACKE_zsy_nancheck( matrix_layout, uplo, n, a, lda ) ) {
            return -5;
        }
        /* tb holds the band factor; checked here as a 4*n-by-1 strip.
         * NOTE(review): ltb is documented as >= 4*n — TODO confirm this
         * nancheck shape matches the middle-level routine's expectation. */
        if( LAPACKE_zge_nancheck( matrix_layout, 4*n, 1, tb, ltb ) ) {
            return -7;
        }
    }
#endif
    /* Query optimal working array(s) size */
    info = LAPACKE_zsytrf_aa_2stage_work( matrix_layout, uplo, n, a, lda, tb,
                                          ltb, ipiv, ipiv2, &work_query, lwork );
    if( info != 0 ) {
        goto exit_level_0;
    }
    /* Optimal lwork is returned in the real part of work_query. */
    lwork = LAPACK_Z2INT( work_query );
    /* Allocate memory for work arrays */
    work = (lapack_complex_double*)
        LAPACKE_malloc( sizeof(lapack_complex_double) * lwork );
    if( work == NULL ) {
        info = LAPACK_WORK_MEMORY_ERROR;
        goto exit_level_0;
    }
    /* Call middle-level interface */
    info = LAPACKE_zsytrf_aa_2stage_work( matrix_layout, uplo, n, a, lda, tb,
                                          ltb, ipiv, ipiv2, work, lwork );
    /* Release memory and exit */
    LAPACKE_free( work );
exit_level_0:
    if( info == LAPACK_WORK_MEMORY_ERROR ) {
        LAPACKE_xerbla( "LAPACKE_zsytrf_aa_2stage", info );
    }
    return info;
}
xianyi/OpenBLAS
lapack-netlib/LAPACKE/src/lapacke_zsytrf_aa_2stage.c
C
bsd-3-clause
3,820
<?php
/*
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * This software consists of voluntary contributions made by many individuals
 * and is licensed under the MIT license.
 */

namespace OcraServiceManagerTest\ServiceFactory;

use OcraServiceManager\ServiceFactory\ApplicationFactory;
use Zend\ServiceManager\ServiceManager;
use PHPUnit_Framework_TestCase;

/**
 * Tests for {@see \OcraServiceManager\ServiceFactory\ApplicationFactory}:
 * verifies that the factory wires the Application instance to the
 * replacement service manager registered under
 * 'OcraServiceManager\ServiceManager' rather than the original one.
 *
 * @author Marco Pivetta <ocramius@gmail.com>
 * @license MIT
 */
class ApplicationFactoryTest extends PHPUnit_Framework_TestCase
{
    /**
     * @covers \OcraServiceManager\ServiceFactory\ApplicationFactory::createService
     */
    public function testCreateService()
    {
        $factory = new ApplicationFactory();
        $serviceManager = new ServiceManager();
        // The replacement manager the factory is expected to hand to the app.
        $replacedServiceManager = $this->getMock('Zend\ServiceManager\ServiceManager');
        // NOTE(review): interface FQCNs use 'Zend\StdLib\...' while the ZF2
        // namespace is spelled 'Zend\Stdlib' — harmless for mocks on a
        // case-insensitive autoloader, but verify before porting.
        $request = $this->getMock('Zend\StdLib\RequestInterface');
        $response = $this->getMock('Zend\StdLib\ResponseInterface');
        $evm = $this->getMock('Zend\EventManager\EventManagerInterface');

        // Application::bootstrap() pulls these three services; serve them
        // from the mocked manager via a lookup table.
        $replacedServiceManager
            ->expects($this->any())
            ->method('get')
            ->will($this->returnCallback(function ($name) use ($request, $response, $evm) {
                $services = array(
                    'Request'      => $request,
                    'Response'     => $response,
                    'EventManager' => $evm,
                );

                return $services[$name];
            }));

        $serviceManager->setService('Config', array());
        $serviceManager->setService('OcraServiceManager\\ServiceManager', $replacedServiceManager);

        $application = $factory->createService($serviceManager);
        $appServiceManager = $application->getServiceManager();

        // The application must see the replaced manager, not the original.
        $this->assertSame($replacedServiceManager, $appServiceManager);
    }
}
marinescudan79/setup
vendor/ocramius/ocra-service-manager/tests/OcraServiceManagerTest/ServiceFactory/ApplicationFactoryTest.php
PHP
bsd-3-clause
2,664
/*- * Copyright (C) 2002 Benno Rice. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY Benno Rice ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL TOOLS GMBH BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 * $FreeBSD$
 */

#include "opt_ddb.h"

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/kdb.h>
#include <sys/kernel.h>
#include <sys/module.h>
#include <sys/malloc.h>
#include <sys/bus.h>
#include <machine/bus.h>
#include <sys/rman.h>
#include <machine/resource.h>

#include <dev/ofw/openfirm.h>

#include <powerpc/powermac/maciovar.h>

/*
 * Driver for the programmer's ("interrupt") switch on Apple PowerMac
 * hardware: attaches to the macio GPIO node and drops into the kernel
 * debugger when the switch fires.
 */
struct pswitch_softc {
	int		sc_irqrid;	/* resource id of the IRQ */
	struct resource	*sc_irq;	/* allocated IRQ resource */
	void		*sc_ih;		/* interrupt handler cookie */
};

static int	pswitch_probe(device_t);
static int	pswitch_attach(device_t);
static int	pswitch_intr(void *);

static device_method_t pswitch_methods[] = {
	/* Device interface */
	DEVMETHOD(device_probe,		pswitch_probe),
	DEVMETHOD(device_attach,	pswitch_attach),

	{ 0, 0 }
};

static driver_t pswitch_driver = {
	"pswitch",
	pswitch_methods,
	sizeof(struct pswitch_softc)
};

static devclass_t pswitch_devclass;

DRIVER_MODULE(pswitch, macio, pswitch_driver, pswitch_devclass, 0, 0);

/*
 * Match any macio child whose device_type is "gpio"; the actual
 * programmer-switch sub-node is located later, in attach.
 */
static int
pswitch_probe(device_t dev)
{
	char	*type = macio_get_devtype(dev);

	if (strcmp(type, "gpio") != 0)
		return (ENXIO);

	device_set_desc(dev, "GPIO Programmer's Switch");
	return (0);
}

/*
 * Walk the OFW children of the gpio node looking for the
 * "programmer-switch" entry, then hook up its interrupt as a filter.
 */
static int
pswitch_attach(device_t dev)
{
	struct pswitch_softc *sc;
	phandle_t node, child;
	char	type[32];
	u_int	irq[2];

	sc = device_get_softc(dev);
	node = macio_get_node(dev);

	for (child = OF_child(node); child != 0; child = OF_peer(child)) {
		if (OF_getprop(child, "device_type", type, 32) == -1)
			continue;
		if (strcmp(type, "programmer-switch") == 0)
			break;
	}

	if (child == 0) {
		device_printf(dev, "could not find correct node\n");
		return (ENXIO);
	}

	/* "interrupts" is a 2-cell property; only the first cell is used. */
	if (OF_getprop(child, "interrupts", irq, sizeof(irq)) == -1) {
		device_printf(dev, "could not get interrupt\n");
		return (ENXIO);
	}

	sc->sc_irqrid = 0;
	sc->sc_irq = bus_alloc_resource(dev, SYS_RES_IRQ, &sc->sc_irqrid,
	    irq[0], irq[0], 1, RF_ACTIVE);
	if (sc->sc_irq == NULL) {
		device_printf(dev, "could not allocate interrupt\n");
		return (ENXIO);
	}

	/* Registered as a filter (no ithread handler) — see pswitch_intr. */
	if (bus_setup_intr(dev, sc->sc_irq, INTR_TYPE_MISC,
	    pswitch_intr, NULL, dev, &sc->sc_ih) != 0) {
		device_printf(dev, "could not setup interrupt\n");
		bus_release_resource(dev, SYS_RES_IRQ, sc->sc_irqrid,
		    sc->sc_irq);
		return (ENXIO);
	}

	return (0);
}

/*
 * Filter routine: enter the kernel debugger directly from interrupt
 * context when the switch is pressed.
 */
static int
pswitch_intr(void *arg)
{
	device_t dev;

	dev = (device_t)arg;

	kdb_enter(KDB_WHY_POWERPC, device_get_nameunit(dev));
	return (FILTER_HANDLED);
}
jhbsz/OSI-OS
sys/powerpc/powermac/pswitch.c
C
bsd-3-clause
3,705
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Vendor core functionality used from xarray.

This code has been reproduced with modification under the terms of the Apache License,
Version 2.0 (notice included below).

Copyright 2014-2019, xarray Developers

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""


def expanded_indexer(key, ndim):
    """Expand an indexer to a tuple with length ndim.

    Given a key for indexing an ndarray, return an equivalent key which is a
    tuple with length equal to the number of dimensions: the first `Ellipsis`
    is replaced by the right number of full slices, and the key is then padded
    with full slices up to ``ndim`` entries.
    """
    if not isinstance(key, tuple):
        # numpy treats non-tuple keys equivalent to tuples of length 1
        key = (key,)
    # handling Ellipsis right is a little tricky, see:
    # http://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#advanced-indexing
    expanded = []
    ellipsis_seen = False
    for item in key:
        if item is not Ellipsis:
            expanded.append(item)
        elif ellipsis_seen:
            # any Ellipsis after the first behaves as a single full slice
            expanded.append(slice(None))
        else:
            expanded.extend([slice(None)] * (ndim + 1 - len(key)))
            ellipsis_seen = True
    if len(expanded) > ndim:
        raise IndexError('too many indices')
    # pad out to the full dimensionality
    expanded.extend([slice(None)] * (ndim - len(expanded)))
    return tuple(expanded)


def is_dict_like(value):
    """Check if value is dict-like."""
    return all(hasattr(value, attr) for attr in ('keys', '__getitem__'))


def either_dict_or_kwargs(pos_kwargs, kw_kwargs, func_name):
    """Ensure dict-like argument from either positional or keyword arguments."""
    if pos_kwargs is None:
        return kw_kwargs
    if not is_dict_like(pos_kwargs):
        raise ValueError('the first argument to .{} must be a '
                         'dictionary'.format(func_name))
    if kw_kwargs:
        raise ValueError('cannot specify both keyword and positional arguments to '
                         '.{}'.format(func_name))
    return pos_kwargs
ahaberlie/MetPy
src/metpy/_vendor/xarray.py
Python
bsd-3-clause
2,816
# -*- coding: utf-8 -*-
# South schema migration (auto-generated): creates the upload_imageattachment
# table backing the ImageAttachment model (a generic-relation image upload
# attached to arbitrary content objects).  The frozen ORM snapshot below is
# generated data — do not hand-edit field definitions.
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'ImageAttachment'
        db.create_table('upload_imageattachment', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('file', self.gf('django.db.models.fields.files.ImageField')(max_length=250)),
            ('thumbnail', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True)),
            ('creator', self.gf('django.db.models.fields.related.ForeignKey')(related_name='image_attachments', to=orm['auth.User'])),
            ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
            ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
        ))
        db.send_create_signal('upload', ['ImageAttachment'])

    def backwards(self, orm):
        # Deleting model 'ImageAttachment'
        db.delete_table('upload_imageattachment')

    # Frozen ORM state at the time this migration was generated.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'upload.imageattachment': {
            'Meta': {'object_name': 'ImageAttachment'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'image_attachments'", 'to': "orm['auth.User']"}),
            'file': ('django.db.models.fields.files.ImageField', [], {'max_length': '250'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True'})
        }
    }

    complete_apps = ['upload']
safwanrahman/linuxdesh
kitsune/upload/migrations/0001_initial.py
Python
bsd-3-clause
4,996
<!DOCTYPE html>
<title>flexbox | flex: N 0 N%</title>
<!-- Reference rendering for the "flex: N 0 N%" flexbox test: the expected
     result is approximated here without flexbox, using four inline-block
     spans of fixed width inside a plain block container.  Markup and style
     are otherwise byte-identical to the original reference; rendering must
     match the test page exactly. -->
<style>
div {
	background: blue;
	margin: 1em 0;
	border: 1px solid black;
	height: 8em;
	width: 40em;
}
span {
	background: yellow;
	margin: 1em 0;
	width: 10em;
	height: 6em;
	display: inline-block;
}
span:nth-child(2) {background: pink;}
span:nth-child(3) {background: lightblue;}
span:nth-child(4) {background: grey;}
</style>
<div>
	<span>one</span><span>two</span><span>three</span><span>four</span>
</div>
frivoal/presto-testo
css/flexbox/flex-N-0-Npercent-ref.html
HTML
bsd-3-clause
468
// =============================================================================
// PROJECT CHRONO - http://projectchrono.org
//
// Copyright (c) 2016 projectchrono.org
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be found
// in the LICENSE file at the top level of the distribution and at
// http://projectchrono.org/license-chrono.txt.
//
// =============================================================================
// Authors: Hammad Mazhar
// =============================================================================
//
// Description: Vectorized implementation of a 3d vector
//
// =============================================================================

#pragma once

#include "chrono_parallel/math/real.h"
#include "chrono_parallel/math/real2.h"

#if !defined(__CUDACC__)
#include "chrono_parallel/math/sse.h"
#endif

namespace chrono {

/// @addtogroup parallel_math
/// @{

/// Chrono::Parallel triplet (3-dimensional vector).
/// Stored as 4 lanes (x, y, z plus a padding lane w) so the type maps onto
/// one AVX/SSE register; every constructor and assignment zeroes the fourth
/// lane.  Note the default constructor leaves x, y, z uninitialized on
/// purpose — only the padding lane is cleared.
class CH_PARALLEL_API real3 {
  public:
    CUDA_HOST_DEVICE inline real3() { array[3] = 0; }
    CUDA_HOST_DEVICE inline explicit real3(real a) {
        array[0] = a;
        array[1] = a;
        array[2] = a;
        array[3] = 0;
    }
    CUDA_HOST_DEVICE inline real3(real a, real b, real c) {
        array[0] = a;
        array[1] = b;
        array[2] = c;
        array[3] = 0;
    }
    CUDA_HOST_DEVICE inline real3(const real3& v) {
        array[0] = v.x;
        array[1] = v.y;
        array[2] = v.z;
        array[3] = 0;
    }
    // Unchecked element access: i must be in [0, 3].
    CUDA_HOST_DEVICE inline real operator[](unsigned int i) const { return array[i]; }
    CUDA_HOST_DEVICE inline real& operator[](unsigned int i) { return array[i]; }
    CUDA_HOST_DEVICE inline real3& operator=(const real3& rhs) {
        x = rhs.x;
        y = rhs.y;
        z = rhs.z;
        w = 0;  // copy never propagates the padding lane
        return *this;  // Return a reference to myself.
    }

#if defined(USE_AVX)
    // Implicit round-trip to a raw AVX register (double precision).
    inline real3(__m256d m) { _mm256_storeu_pd(&array[0], m); }
    inline operator __m256d() const { return _mm256_loadu_pd(&array[0]); }
    inline real3& operator=(const __m256d& rhs) {
        _mm256_storeu_pd(&array[0], rhs);
        return *this;
    }
    static inline __m256d Set(real x) { return _mm256_set1_pd(x); }
    static inline __m256d Set(real x, real y, real z) { return _mm256_setr_pd(x, y, z, 0.0); }
#elif defined(USE_SSE)
    // Implicit round-trip to a raw SSE register (single precision).
    inline real3(__m128 m) { _mm_storeu_ps(&array[0], m); }
    inline operator __m128() const { return _mm_loadu_ps(&array[0]); }
    inline real3& operator=(const __m128& rhs) {
        _mm_storeu_ps(&array[0], rhs);
        return *this;
    }
    static inline __m128 Set(real x) { return _mm_set1_ps(x); }
    static inline __m128 Set(real x, real y, real z) { return _mm_setr_ps(x, y, z, 0.0f); }
#else
#endif

    // ========================================================================================

    // NOTE(review): anonymous struct inside a union is non-standard C++ but
    // accepted by the compilers Chrono targets; array and (x, y, z, w) alias
    // the same storage.
    union {
        real array[4];
        struct {
            real x, y, z, w;
        };
    };
};

/// Factory helpers (return the register type when SIMD is enabled).
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Set3(real x);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Set3(real x, real y, real z);

/// Componentwise vector-scalar arithmetic.
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator+(const real3& a, real b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator-(const real3& a, real b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator*(const real3& a, real b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator/(const real3& a, real b);

/// Componentwise vector-vector arithmetic.
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator+(const real3& a, const real3& b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator-(const real3& a, const real3& b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator*(const real3& a, const real3& b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator/(const real3& a, const real3& b);

/// Compound-assignment operators, expanded by the OPERATOR_EQUALS_PROTO macro.
CUDA_HOST_DEVICE CH_PARALLEL_API OPERATOR_EQUALS_PROTO(*, real, real3);
CUDA_HOST_DEVICE CH_PARALLEL_API OPERATOR_EQUALS_PROTO(/, real, real3);
CUDA_HOST_DEVICE CH_PARALLEL_API OPERATOR_EQUALS_PROTO(+, real, real3);
CUDA_HOST_DEVICE CH_PARALLEL_API OPERATOR_EQUALS_PROTO(-, real, real3);

CUDA_HOST_DEVICE CH_PARALLEL_API OPERATOR_EQUALS_PROTO(*, real3, real3);
CUDA_HOST_DEVICE CH_PARALLEL_API OPERATOR_EQUALS_PROTO(/, real3, real3);
CUDA_HOST_DEVICE CH_PARALLEL_API OPERATOR_EQUALS_PROTO(+, real3, real3);
CUDA_HOST_DEVICE CH_PARALLEL_API OPERATOR_EQUALS_PROTO(-, real3, real3);

CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator-(const real3& a);

CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator*(real lhs, const real3& rhs);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 operator/(real lhs, const real3& rhs);

CUDA_HOST_DEVICE CH_PARALLEL_API bool operator<(const real3& lhs, const real3& rhs);
CUDA_HOST_DEVICE CH_PARALLEL_API bool operator>(const real3& lhs, const real3& rhs);
CUDA_HOST_DEVICE CH_PARALLEL_API bool operator==(const real3& lhs, const real3& rhs);

/// Geometric and utility free functions.
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Cross(const real3& b, const real3& c);
CUDA_HOST_DEVICE CH_PARALLEL_API real Dot(const real3& v1, const real3& v2);
CUDA_HOST_DEVICE CH_PARALLEL_API real Dot(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Normalize(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Sqrt(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Round(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real Length(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real Length2(const real3& v1);
CUDA_HOST_DEVICE CH_PARALLEL_API real SafeLength(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 SafeNormalize(const real3& v, const real3& safe = real3(0));
CUDA_HOST_DEVICE CH_PARALLEL_API real Max(const real3& a);
CUDA_HOST_DEVICE CH_PARALLEL_API real Min(const real3& a);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Max(const real3& a, const real3& b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Min(const real3& a, const real3& b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Max(const real3& a, const real& b);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Min(const real3& a, const real& b);
CUDA_HOST_DEVICE CH_PARALLEL_API bool IsZero(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Abs(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Sign(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Clamp(const real3& v, real max_length);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 Clamp(const real3& a, const real3& clamp_min, const real3& clamp_max);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 OrthogonalVector(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API real3 UnitOrthogonalVector(const real3& v);
CUDA_HOST_DEVICE CH_PARALLEL_API void Sort(real& a, real& b, real& c);
CUDA_HOST_DEVICE CH_PARALLEL_API void Print(real3 v, const char* name);

/// @} parallel_math

} // end namespace chrono
amelmquist/chrono
src/chrono_parallel/math/real3.h
C
bsd-3-clause
6,640
package spark import java.io._ import java.net._ import java.util.{Locale, Random, UUID} import java.util.concurrent.{Executors, ThreadFactory, ThreadPoolExecutor} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{Path, FileSystem, FileUtil} import scala.collection.mutable.ArrayBuffer import scala.collection.JavaConversions._ import scala.io.Source import com.google.common.io.Files import com.google.common.util.concurrent.ThreadFactoryBuilder import scala.Some import spark.serializer.SerializerInstance /** * Various utility methods used by Spark. */ private object Utils extends Logging { /** Serialize an object using Java serialization */ def serialize[T](o: T): Array[Byte] = { val bos = new ByteArrayOutputStream() val oos = new ObjectOutputStream(bos) oos.writeObject(o) oos.close() return bos.toByteArray } /** Deserialize an object using Java serialization */ def deserialize[T](bytes: Array[Byte]): T = { val bis = new ByteArrayInputStream(bytes) val ois = new ObjectInputStream(bis) return ois.readObject.asInstanceOf[T] } /** Deserialize an object using Java serialization and the given ClassLoader */ def deserialize[T](bytes: Array[Byte], loader: ClassLoader): T = { val bis = new ByteArrayInputStream(bytes) val ois = new ObjectInputStream(bis) { override def resolveClass(desc: ObjectStreamClass) = Class.forName(desc.getName, false, loader) } return ois.readObject.asInstanceOf[T] } def isAlpha(c: Char): Boolean = { (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') } /** Split a string into words at non-alphabetic characters */ def splitWords(s: String): Seq[String] = { val buf = new ArrayBuffer[String] var i = 0 while (i < s.length) { var j = i while (j < s.length && isAlpha(s.charAt(j))) { j += 1 } if (j > i) { buf += s.substring(i, j) } i = j while (i < s.length && !isAlpha(s.charAt(i))) { i += 1 } } return buf } /** Create a temporary directory inside the given parent directory */ def createTempDir(root: String = 
System.getProperty("java.io.tmpdir")): File = { var attempts = 0 val maxAttempts = 10 var dir: File = null while (dir == null) { attempts += 1 if (attempts > maxAttempts) { throw new IOException("Failed to create a temp directory after " + maxAttempts + " attempts!") } try { dir = new File(root, "spark-" + UUID.randomUUID.toString) if (dir.exists() || !dir.mkdirs()) { dir = null } } catch { case e: IOException => ; } } // Add a shutdown hook to delete the temp dir when the JVM exits Runtime.getRuntime.addShutdownHook(new Thread("delete Spark temp dir " + dir) { override def run() { Utils.deleteRecursively(dir) } }) return dir } /** Copy all data from an InputStream to an OutputStream */ def copyStream(in: InputStream, out: OutputStream, closeStreams: Boolean = false) { val buf = new Array[Byte](8192) var n = 0 while (n != -1) { n = in.read(buf) if (n != -1) { out.write(buf, 0, n) } } if (closeStreams) { in.close() out.close() } } /** * Download a file requested by the executor. Supports fetching the file in a variety of ways, * including HTTP, HDFS and files on a standard filesystem, based on the URL parameter. * * Throws SparkException if the target file already exists and has different contents than * the requested file. 
*/ def fetchFile(url: String, targetDir: File) { val filename = url.split("/").last val tempDir = getLocalDir val tempFile = File.createTempFile("fetchFileTemp", null, new File(tempDir)) val targetFile = new File(targetDir, filename) val uri = new URI(url) uri.getScheme match { case "http" | "https" | "ftp" => logInfo("Fetching " + url + " to " + tempFile) val in = new URL(url).openStream() val out = new FileOutputStream(tempFile) Utils.copyStream(in, out, true) if (targetFile.exists && !Files.equal(tempFile, targetFile)) { tempFile.delete() throw new SparkException("File " + targetFile + " exists and does not match contents of" + " " + url) } else { Files.move(tempFile, targetFile) } case "file" | null => val sourceFile = if (uri.isAbsolute) { new File(uri) } else { new File(url) } if (targetFile.exists && !Files.equal(sourceFile, targetFile)) { throw new SparkException("File " + targetFile + " exists and does not match contents of" + " " + url) } else { // Remove the file if it already exists targetFile.delete() // Symlink the file locally. if (uri.isAbsolute) { // url is absolute, i.e. it starts with "file:///". Extract the source // file's absolute path from the url. val sourceFile = new File(uri) logInfo("Symlinking " + sourceFile.getAbsolutePath + " to " + targetFile.getAbsolutePath) FileUtil.symLink(sourceFile.getAbsolutePath, targetFile.getAbsolutePath) } else { // url is not absolute, i.e. itself is the path to the source file. 
logInfo("Symlinking " + url + " to " + targetFile.getAbsolutePath) FileUtil.symLink(url, targetFile.getAbsolutePath) } } case _ => // Use the Hadoop filesystem library, which supports file://, hdfs://, s3://, and others val uri = new URI(url) val conf = new Configuration() val fs = FileSystem.get(uri, conf) val in = fs.open(new Path(uri)) val out = new FileOutputStream(tempFile) Utils.copyStream(in, out, true) if (targetFile.exists && !Files.equal(tempFile, targetFile)) { tempFile.delete() throw new SparkException("File " + targetFile + " exists and does not match contents of" + " " + url) } else { Files.move(tempFile, targetFile) } } // Decompress the file if it's a .tar or .tar.gz if (filename.endsWith(".tar.gz") || filename.endsWith(".tgz")) { logInfo("Untarring " + filename) Utils.execute(Seq("tar", "-xzf", filename), targetDir) } else if (filename.endsWith(".tar")) { logInfo("Untarring " + filename) Utils.execute(Seq("tar", "-xf", filename), targetDir) } // Make the file executable - That's necessary for scripts FileUtil.chmod(targetFile.getAbsolutePath, "a+x") } /** * Get a temporary directory using Spark's spark.local.dir property, if set. This will always * return a single directory, even though the spark.local.dir property might be a list of * multiple paths. */ def getLocalDir: String = { System.getProperty("spark.local.dir", System.getProperty("java.io.tmpdir")).split(',')(0) } /** * Shuffle the elements of a collection into a random order, returning the * result in a new collection. Unlike scala.util.Random.shuffle, this method * uses a local random number generator, avoiding inter-thread contention. */ def randomize[T: ClassManifest](seq: TraversableOnce[T]): Seq[T] = { randomizeInPlace(seq.toArray) } /** * Shuffle the elements of an array into a random order, modifying the * original array. Returns the original array. 
*/ def randomizeInPlace[T](arr: Array[T], rand: Random = new Random): Array[T] = { for (i <- (arr.length - 1) to 1 by -1) { val j = rand.nextInt(i) val tmp = arr(j) arr(j) = arr(i) arr(i) = tmp } arr } /** * Get the local host's IP address in dotted-quad format (e.g. 1.2.3.4). */ lazy val localIpAddress: String = findLocalIpAddress() private def findLocalIpAddress(): String = { val defaultIpOverride = System.getenv("SPARK_LOCAL_IP") if (defaultIpOverride != null) { defaultIpOverride } else { val address = InetAddress.getLocalHost if (address.isLoopbackAddress) { // Address resolves to something like 127.0.1.1, which happens on Debian; try to find // a better address using the local network interfaces for (ni <- NetworkInterface.getNetworkInterfaces) { for (addr <- ni.getInetAddresses if !addr.isLinkLocalAddress && !addr.isLoopbackAddress && addr.isInstanceOf[Inet4Address]) { // We've found an address that looks reasonable! logWarning("Your hostname, " + InetAddress.getLocalHost.getHostName + " resolves to" + " a loopback address: " + address.getHostAddress + "; using " + addr.getHostAddress + " instead (on interface " + ni.getName + ")") logWarning("Set SPARK_LOCAL_IP if you need to bind to another address") return addr.getHostAddress } } logWarning("Your hostname, " + InetAddress.getLocalHost.getHostName + " resolves to" + " a loopback address: " + address.getHostAddress + ", but we couldn't find any" + " external IP address!") logWarning("Set SPARK_LOCAL_IP if you need to bind to another address") } address.getHostAddress } } private var customHostname: Option[String] = None /** * Allow setting a custom host name because when we run on Mesos we need to use the same * hostname it reports to the master. */ def setCustomHostname(hostname: String) { customHostname = Some(hostname) } /** * Get the local machine's hostname. 
*/ def localHostName(): String = { customHostname.getOrElse(InetAddress.getLocalHost.getHostName) } private[spark] val daemonThreadFactory: ThreadFactory = new ThreadFactoryBuilder().setDaemon(true).build() /** * Wrapper over newCachedThreadPool. */ def newDaemonCachedThreadPool(): ThreadPoolExecutor = Executors.newCachedThreadPool(daemonThreadFactory).asInstanceOf[ThreadPoolExecutor] /** * Return the string to tell how long has passed in seconds. The passing parameter should be in * millisecond. */ def getUsedTimeMs(startTimeMs: Long): String = { return " " + (System.currentTimeMillis - startTimeMs) + " ms" } /** * Wrapper over newFixedThreadPool. */ def newDaemonFixedThreadPool(nThreads: Int): ThreadPoolExecutor = Executors.newFixedThreadPool(nThreads, daemonThreadFactory).asInstanceOf[ThreadPoolExecutor] /** * Delete a file or directory and its contents recursively. */ def deleteRecursively(file: File) { if (file.isDirectory) { for (child <- file.listFiles()) { deleteRecursively(child) } } if (!file.delete()) { throw new IOException("Failed to delete: " + file) } } /** * Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of megabytes. * This is used to figure out how much memory to claim from Mesos based on the SPARK_MEM * environment variable. */ def memoryStringToMb(str: String): Int = { val lower = str.toLowerCase if (lower.endsWith("k")) { (lower.substring(0, lower.length-1).toLong / 1024).toInt } else if (lower.endsWith("m")) { lower.substring(0, lower.length-1).toInt } else if (lower.endsWith("g")) { lower.substring(0, lower.length-1).toInt * 1024 } else if (lower.endsWith("t")) { lower.substring(0, lower.length-1).toInt * 1024 * 1024 } else {// no suffix, so it's just a number in bytes (lower.toLong / 1024 / 1024).toInt } } /** * Convert a memory quantity in bytes to a human-readable string such as "4.0 MB". 
*/ def memoryBytesToString(size: Long): String = { val TB = 1L << 40 val GB = 1L << 30 val MB = 1L << 20 val KB = 1L << 10 val (value, unit) = { if (size >= 2*TB) { (size.asInstanceOf[Double] / TB, "TB") } else if (size >= 2*GB) { (size.asInstanceOf[Double] / GB, "GB") } else if (size >= 2*MB) { (size.asInstanceOf[Double] / MB, "MB") } else if (size >= 2*KB) { (size.asInstanceOf[Double] / KB, "KB") } else { (size.asInstanceOf[Double], "B") } } "%.1f %s".formatLocal(Locale.US, value, unit) } /** * Convert a memory quantity in megabytes to a human-readable string such as "4.0 MB". */ def memoryMegabytesToString(megabytes: Long): String = { memoryBytesToString(megabytes * 1024L * 1024L) } /** * Execute a command in the given working directory, throwing an exception if it completes * with an exit code other than 0. */ def execute(command: Seq[String], workingDir: File) { val process = new ProcessBuilder(command: _*) .directory(workingDir) .redirectErrorStream(true) .start() new Thread("read stdout for " + command(0)) { override def run() { for (line <- Source.fromInputStream(process.getInputStream).getLines) { System.err.println(line) } } }.start() val exitCode = process.waitFor() if (exitCode != 0) { throw new SparkException("Process " + command + " exited with code " + exitCode) } } /** * Execute a command in the current working directory, throwing an exception if it completes * with an exit code other than 0. */ def execute(command: Seq[String]) { execute(command, new File(".")) } /** * When called inside a class in the spark package, returns the name of the user code class * (outside the spark package) that called into Spark, as well as which Spark method they called. * This is used, for example, to tell users where in their code each RDD got created. 
*/ def getSparkCallSite: String = { val trace = Thread.currentThread.getStackTrace().filter( el => (!el.getMethodName.contains("getStackTrace"))) // Keep crawling up the stack trace until we find the first function not inside of the spark // package. We track the last (shallowest) contiguous Spark method. This might be an RDD // transformation, a SparkContext function (such as parallelize), or anything else that leads // to instantiation of an RDD. We also track the first (deepest) user method, file, and line. var lastSparkMethod = "<unknown>" var firstUserFile = "<unknown>" var firstUserLine = 0 var finished = false for (el <- trace) { if (!finished) { if (el.getClassName.startsWith("spark.") && !el.getClassName.startsWith("spark.examples.")) { lastSparkMethod = if (el.getMethodName == "<init>") { // Spark method is a constructor; get its class name el.getClassName.substring(el.getClassName.lastIndexOf('.') + 1) } else { el.getMethodName } } else { firstUserLine = el.getLineNumber firstUserFile = el.getFileName finished = true } } } "%s at %s:%s".format(lastSparkMethod, firstUserFile, firstUserLine) } /** * Try to find a free port to bind to on the local host. This should ideally never be needed, * except that, unfortunately, some of the networking libraries we currently rely on (e.g. Spray) * don't let users bind to port 0 and then figure out which free port they actually bound to. * We work around this by binding a ServerSocket and immediately unbinding it. This is *not* * necessarily guaranteed to work, but it's the best we can do. */ def findFreePort(): Int = { val socket = new ServerSocket(0) val portBound = socket.getLocalPort socket.close() portBound } /** * Clone an object using a Spark serializer. */ def clone[T](value: T, serializer: SerializerInstance): T = { serializer.deserialize[T](serializer.serialize(value)) } /** * Detect whether this thread might be executing a shutdown hook. 
Will always return true if * the current thread is a running a shutdown hook but may spuriously return true otherwise (e.g. * if System.exit was just called by a concurrent thread). * * Currently, this detects whether the JVM is shutting down by Runtime#addShutdownHook throwing * an IllegalStateException. */ def inShutdown(): Boolean = { try { val hook = new Thread { override def run() {} } Runtime.getRuntime.addShutdownHook(hook) Runtime.getRuntime.removeShutdownHook(hook) } catch { case ise: IllegalStateException => return true } return false } }
koeninger/spark
core/src/main/scala/spark/Utils.scala
Scala
bsd-3-clause
16,754
/**
 * PANDA 3D SOFTWARE
 * Copyright (c) Carnegie Mellon University.  All rights reserved.
 *
 * All use of this software is subject to the terms of the revised BSD
 * license.  You should have received a copy of this license along
 * with this source code in a file named "LICENSE."
 *
 * @file pta_int.h
 * @author drose
 * @date 2000-05-10
 */

#ifndef PTA_INT_H
#define PTA_INT_H

#include "pandabase.h"
#include "pointerToArray.h"
#include "vector_int.h"

/**
 * A pta of ints.  This class is defined once here, and exported to PANDA.DLL;
 * other packages that want to use a pta of this type (whether they need to
 * export it or not) should include this header file, rather than defining the
 * pta again.
 */
EXPORT_TEMPLATE_CLASS(EXPCL_PANDAEXPRESS, EXPTP_PANDAEXPRESS, PointerToBase<ReferenceCountedVector<int> >)
EXPORT_TEMPLATE_CLASS(EXPCL_PANDAEXPRESS, EXPTP_PANDAEXPRESS, PointerToArrayBase<int>)
EXPORT_TEMPLATE_CLASS(EXPCL_PANDAEXPRESS, EXPTP_PANDAEXPRESS, PointerToArray<int>)
EXPORT_TEMPLATE_CLASS(EXPCL_PANDAEXPRESS, EXPTP_PANDAEXPRESS, ConstPointerToArray<int>)

// Convenience aliases for the instantiations exported above: PTA_int is the
// mutable reference-counted int array, CPTA_int its const counterpart.
typedef PointerToArray<int> PTA_int;
typedef ConstPointerToArray<int> CPTA_int;

// Tell GCC that we'll take care of the instantiation explicitly here.
#ifdef __GNUC__
#pragma interface
#endif

#endif
tobspr/panda3d
panda/src/express/pta_int.h
C
bsd-3-clause
1,287
<div class="col-md-9"> <div class="row" id="productMain"> <div class="col-sm-6"> <div id="mainImage"> <img src="img/detailbig1.jpg" alt="" class="img-responsive"> </div> <div class="ribbon sale"> <div class="theribbon">SALE</div> <div class="ribbon-background"></div> </div> <!-- /.ribbon --> <div class="ribbon new"> <div class="theribbon">NEW</div> <div class="ribbon-background"></div> </div> <!-- /.ribbon --> </div> <div class="col-sm-6"> <div class="box"> <h1 class="text-center">White Blouse Armani</h1> <p class="goToDescription"><a href="#details" class="scroll-to">Scroll to product details, material & care and sizing</a> </p> <p class="price">$124.00</p> <p class="text-center buttons"> <a href="basket.html" class="btn btn-primary"><i class="fa fa-shopping-cart"></i> Add to cart</a> <a href="basket.html" class="btn btn-default"><i class="fa fa-heart"></i> Add to wishlist</a> </p> </div> <div class="row" id="thumbs"> <div class="col-xs-4"> <a href="img/detailbig1.jpg" class="thumb"> <img src="img/detailsquare.jpg" alt="" class="img-responsive"> </a> </div> <div class="col-xs-4"> <a href="img/detailbig2.jpg" class="thumb"> <img src="img/detailsquare2.jpg" alt="" class="img-responsive"> </a> </div> <div class="col-xs-4"> <a href="img/detailbig3.jpg" class="thumb"> <img src="img/detailsquare3.jpg" alt="" class="img-responsive"> </a> </div> </div> </div> </div> <div class="box" id="details"> <p> <h4>Product details</h4> <p>White lace top, woven, has a round neck, short sleeves, has knitted lining attached</p> <h4>Material & care</h4> <ul> <li>Polyester</li> <li>Machine wash</li> </ul> <h4>Size & Fit</h4> <ul> <li>Regular fit</li> <li>The model (height 5'8" and chest 33") is wearing a size S</li> </ul> <blockquote> <p><em>Define style this season with Armani's new range of trendy tops, crafted with intricate details. 
Create a chic statement look by teaming this lace number with skinny jeans and pumps.</em> </p> </blockquote> <hr> <div class="social"> <h4>Show it to your friends</h4> <p> <a href="#" class="external facebook" data-animate-hover="pulse"><i class="fa fa-facebook"></i></a> <a href="#" class="external gplus" data-animate-hover="pulse"><i class="fa fa-google-plus"></i></a> <a href="#" class="external twitter" data-animate-hover="pulse"><i class="fa fa-twitter"></i></a> <a href="#" class="email" data-animate-hover="pulse"><i class="fa fa-envelope"></i></a> </p> </div> </div> </div> <!-- /.col-md-9 -->
JayMorrison/Wesler.com
frontend/views/site/tmp.html
HTML
bsd-3-clause
3,445
<?php
/**
 * @link http://www.yiiframework.com/
 * @copyright Copyright (c) 2008 Yii Software LLC
 * @license http://www.yiiframework.com/license/
 */

namespace yii\db;

/**
 * ColumnSchema class describes the metadata of a column in a database table.
 *
 * @author Qiang Xue <qiang.xue@gmail.com>
 * @since 2.0
 */
class ColumnSchema extends \yii\base\Component
{
    /**
     * @var string name of this column (without quotes).
     */
    public $name;
    /**
     * @var boolean whether this column can be null.
     */
    public $allowNull;
    /**
     * @var string abstract type of this column. Possible abstract types include:
     * string, text, boolean, smallint, integer, bigint, float, decimal, datetime,
     * timestamp, time, date, binary, and money.
     */
    public $type;
    /**
     * @var string the PHP type of this column. Possible PHP types include:
     * string, boolean, integer, double.
     */
    public $phpType;
    /**
     * @var string the DB type of this column. Possible DB types vary according to the type of DBMS.
     */
    public $dbType;
    /**
     * @var mixed default value of this column
     */
    public $defaultValue;
    /**
     * @var array enumerable values. This is set only if the column is declared to be an enumerable type.
     */
    public $enumValues;
    /**
     * @var integer display size of the column.
     */
    public $size;
    /**
     * @var integer precision of the column data, if it is numeric.
     */
    public $precision;
    /**
     * @var integer scale of the column data, if it is numeric.
     */
    public $scale;
    /**
     * @var boolean whether this column is a primary key
     */
    public $isPrimaryKey;
    /**
     * @var boolean whether this column is auto-incremental
     */
    public $autoIncrement = false;
    /**
     * @var boolean whether this column is unsigned. This is only meaningful
     * when [[type]] is `smallint`, `integer` or `bigint`.
     */
    public $unsigned;
    /**
     * @var string comment of this column. Not all DBMS support this.
     */
    public $comment;

    /**
     * Converts the input value according to [[phpType]].
     * If the value is null, already of the target type, or an [[Expression]],
     * it is returned unchanged.
     * @param mixed $value input value
     * @return mixed converted value
     */
    public function typecast($value)
    {
        if ($value === null || gettype($value) === $this->phpType || $value instanceof Expression) {
            return $value;
        }
        switch ($this->phpType) {
            case 'string':
                return (string)$value;
            case 'integer':
                return (integer)$value;
            case 'boolean':
                return (boolean)$value;
            // Fix: [[phpType]] documents 'double' as a possible type, but the
            // original switch had no case for it, so float/decimal columns
            // (which PDO commonly fetches as strings) were returned unconverted.
            // gettype() names PHP floats 'double', hence the case label.
            case 'double':
                return (double)$value;
        }
        return $value;
    }
}
skynorg/yii2
yii/db/ColumnSchema.php
PHP
bsd-3-clause
2,479
#include "FLA_lapack2flame_return_defs.h"
#include "FLA_f2c.h"

/* Table of constant values */
static int c__1 = 1;
static int c_n1 = -1;

/* Argument checking for SGEQRFP (single-precision QR factorization with a
   non-negative-diagonal R).  Validates M, N, LDA and LWORK, services the
   LWORK == -1 workspace query, reports bad arguments through xerbla_, and
   returns a LAPACK_* code telling the caller whether to proceed. */
int sgeqrfp_check(int *m, int *n, float *a, int *lda, float *tau, float *work, int *lwork, int *info)
{
    /* System generated locals */
    int a_dim1, a_offset, i__1;
    /* Local variables */
    int k, nb;
    int lwkopt;
    logical lquery;
    /* Parameter adjustments (shift to 1-based Fortran-style indexing) */
    a_dim1 = *lda;
    a_offset = 1 + a_dim1;
    a -= a_offset;
    --tau;
    --work;
    /* Function Body */
    *info = 0;
    /* Optimal block size; SGEQRFP shares SGEQRF's blocking parameters. */
    nb = ilaenv_(&c__1, "SGEQRF", " ", m, n, &c_n1, &c_n1);
    lwkopt = *n * nb;
    /* Store the optimal workspace size so a query can read it back. */
    work[1] = (float) lwkopt;
    lquery = *lwork == -1;
    if (*m < 0)
    {
        *info = -1;
    }
    else if (*n < 0)
    {
        *info = -2;
    }
    else if (*lda < max(1,*m))
    {
        *info = -4;
    }
    else if (*lwork < max(1,*n) && ! lquery)
    {
        *info = -7;
    }
    if (*info != 0)
    {
        /* xerbla_ expects the 1-based index of the offending argument. */
        i__1 = -(*info);
        xerbla_("SGEQRFP", &i__1);
        return LAPACK_FAILURE;
    }
    else if (lquery)
    {
        /* Workspace query only: optimal LWORK is already in work[1]. */
        return LAPACK_QUERY_RETURN;
    }
    /* Quick return if possible */
    k = min(*m,*n);
    if (k == 0)
    {
        work[1] = 1.f;
        return LAPACK_QUICK_RETURN;
    }
    return LAPACK_SUCCESS;
}
yaowee/libflame
src/map/lapack2flamec/check/sgeqrfp.c
C
bsd-3-clause
1,288
/* * Table */ table.dataTable { margin: 0 auto; clear: both; width: 100%; } table.dataTable thead th { font-weight: bold; cursor: pointer; *cursor: hand; } table.dataTable tfoot th { padding: 3px 18px 3px 10px; border-top: 1px solid black; font-weight: bold; } table.dataTable td { padding: 3px 10px; } table.dataTable td.center, table.dataTable td.dataTables_empty { text-align: center; } table.dataTable tr.odd { background-color: #E2E4FF; } table.dataTable tr.even { background-color: white; } table.dataTable tr.odd td.sorting_1 { background-color: #f1f1f1; } table.dataTable tr.odd td.sorting_2 { background-color: #DADCFF; } table.dataTable tr.odd td.sorting_3 { background-color: #E0E2FF; } table.dataTable tr.even td.sorting_1 { background-color: #f9f9f9; } table.dataTable tr.even td.sorting_2 { background-color: #F2F3FF; } table.dataTable tr.even td.sorting_3 { background-color: #F9F9FF; } /* * Table wrapper */ .dataTables_wrapper { position: relative; clear: both; *zoom: 1; } /* * Page length menu */ .dataTables_length { float: left; } /* * Filter */ .dataTables_filter { float: right; text-align: right; } /* * Table information */ .dataTables_info { clear: both; float: left; } /* * Pagination */ .dataTables_paginate { float: right; text-align: right; } /* Two button pagination - previous / next */ .paginate_disabled_previous, .paginate_enabled_previous, .paginate_disabled_next, .paginate_enabled_next { height: 19px; float: left; cursor: pointer; *cursor: hand; color: #111 !important; } .paginate_disabled_previous:hover, .paginate_enabled_previous:hover, .paginate_disabled_next:hover, .paginate_enabled_next:hover { text-decoration: none !important; } .paginate_disabled_previous:active, .paginate_enabled_previous:active, .paginate_disabled_next:active, .paginate_enabled_next:active { outline: none; } .paginate_disabled_previous, .paginate_disabled_next { color: #666 !important; } .paginate_disabled_previous, .paginate_enabled_previous { padding-left: 23px; } 
.paginate_disabled_next, .paginate_enabled_next { padding-right: 23px; margin-left: 10px; } .paginate_enabled_previous { background: url('../images/back_enabled.png') no-repeat top left; } .paginate_enabled_previous:hover { background: url('../images/back_enabled_hover.png') no-repeat top left; } .paginate_disabled_previous { background: url('../images/back_disabled.png') no-repeat top left; } .paginate_enabled_next { background: url('../images/forward_enabled.png') no-repeat top right; } .paginate_enabled_next:hover { background: url('../images/forward_enabled_hover.png') no-repeat top right; } .paginate_disabled_next { background: url('../images/forward_disabled.png') no-repeat top right; } /* Full number pagination */ .paging_full_numbers { height: 22px; line-height: 22px; } .paging_full_numbers a:active { outline: none } .paging_full_numbers a:hover { text-decoration: none; } .paging_full_numbers a.paginate_button, .paging_full_numbers a.paginate_active { border: 1px solid #aaa; -webkit-border-radius: 5px; -moz-border-radius: 5px; border-radius: 5px; padding: 2px 5px; margin: 0 3px; cursor: pointer; *cursor: hand; color: #333 !important; } .paging_full_numbers a.paginate_button { background-color: #ddd; } .paging_full_numbers a.paginate_button:hover { background-color: #ccc; text-decoration: none !important; } .paging_full_numbers a.paginate_active { background-color: #99B3FF; } /* * Processing indicator */ .dataTables_processing { position: absolute; top: 50%; left: 50%; width: 250px; height: 30px; margin-left: -125px; margin-top: -15px; padding: 14px 0 30px 0; border: 1px solid #ddd; text-align: center; color: #999; font-size: 14px; background-color: white; } /* * Sorting */ .sorting { background: url('../images/sort_both.png') no-repeat center right; } .sorting_asc { background: url('../images/sort_asc.png') no-repeat center right; } .sorting_desc { background: url('../images/sort_desc.png') no-repeat center right; } .sorting_asc_disabled { background: 
url('../images/sort_asc_disabled.png') no-repeat center right; } .sorting_desc_disabled { background: url('../images/sort_desc_disabled.png') no-repeat center right; } table.dataTable thead th:active, table.dataTable thead td:active { outline: none; } /* * Scrolling */ .dataTables_scroll { clear: both; } .dataTables_scrollBody { *margin-top: -1px; -webkit-overflow-scrolling: touch; }
web-ir/szyk
public/css/jquery.dataTables.css
CSS
bsd-3-clause
4,493
// // DiskCacheStd.h // Macshroom // // Created by Moishe Lettvin on 11/7/06. // Copyright (C) 2006 Google Inc. All rights reserved. // // #ifndef TALK_BASE_DISKCACHESTD_H__ #define TALK_BASE_DISKCACHESTD_H__ #include "talk/base/diskcache.h" namespace talk_base { class DiskCacheStd : public DiskCache { protected: virtual bool InitializeEntries(); virtual bool PurgeFiles(); virtual bool FileExists(const std::string& filename) const; virtual bool DeleteFile(const std::string& filename) const; }; } #endif // TALK_BASE_DISKCACHESTD_H__
rwatson/chromium-capsicum
third_party/libjingle/files/talk/base/diskcachestd.h
C
bsd-3-clause
559
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "flutter/lib/ui/painting/image_generator.h"

#include "flutter/fml/logging.h"

namespace flutter {

ImageGenerator::~ImageGenerator() = default;

// Decodes the full image into a newly allocated, immutable SkImage.
// Returns nullptr when the pixel allocation fails or when the subclass's
// GetPixels() reports failure; failures are logged at debug level only.
sk_sp<SkImage> ImageGenerator::GetImage() {
  SkImageInfo info = GetInfo();

  SkBitmap bitmap;
  if (!bitmap.tryAllocPixels(info)) {
    FML_DLOG(ERROR) << "Failed to allocate memory for bitmap of size "
                    << info.computeMinByteSize() << "B";
    return nullptr;
  }

  const auto& pixmap = bitmap.pixmap();
  // Delegates to the subclass; default frame_index/prior_frame are used.
  if (!GetPixels(pixmap.info(), pixmap.writable_addr(), pixmap.rowBytes())) {
    FML_DLOG(ERROR) << "Failed to get pixels for image.";
    return nullptr;
  }
  // Marking the bitmap immutable lets MakeFromBitmap share the pixels
  // instead of copying them.
  bitmap.setImmutable();
  return SkImage::MakeFromBitmap(bitmap);
}

BuiltinSkiaImageGenerator::~BuiltinSkiaImageGenerator() = default;

// Wraps a plain SkImageGenerator (single-frame images only).
BuiltinSkiaImageGenerator::BuiltinSkiaImageGenerator(
    std::unique_ptr<SkImageGenerator> generator)
    : generator_(std::move(generator)) {}

const SkImageInfo& BuiltinSkiaImageGenerator::GetInfo() {
  return generator_->getInfo();
}

// Plain SkImageGenerators are always single-frame...
unsigned int BuiltinSkiaImageGenerator::GetFrameCount() const {
  return 1;
}

// ...and that single frame plays exactly once.
unsigned int BuiltinSkiaImageGenerator::GetPlayCount() const {
  return 1;
}

// frame_index is ignored: there is only one frame and it has no
// dependency on a prior frame.
const ImageGenerator::FrameInfo BuiltinSkiaImageGenerator::GetFrameInfo(
    unsigned int frame_index) const {
  return {.required_frame = std::nullopt,
          .duration = 0,
          .disposal_method = SkCodecAnimation::DisposalMethod::kKeep};
}

// desired_scale is ignored; the generator's natural dimensions are returned.
SkISize BuiltinSkiaImageGenerator::GetScaledDimensions(float desired_scale) {
  return generator_->getInfo().dimensions();
}

// frame_index/prior_frame are ignored for the same single-frame reason.
bool BuiltinSkiaImageGenerator::GetPixels(
    const SkImageInfo& info,
    void* pixels,
    size_t row_bytes,
    unsigned int frame_index,
    std::optional<unsigned int> prior_frame) {
  return generator_->getPixels(info, pixels, row_bytes);
}

// Factory; returns nullptr when given a null generator.
std::unique_ptr<ImageGenerator> BuiltinSkiaImageGenerator::MakeFromGenerator(
    std::unique_ptr<SkImageGenerator> generator) {
  if (!generator) {
    return nullptr;
  }
  return std::make_unique<BuiltinSkiaImageGenerator>(std::move(generator));
}

BuiltinSkiaCodecImageGenerator::~BuiltinSkiaCodecImageGenerator() = default;

// The static_cast is safe per SkCodecImageGenerator::MakeFromCodec's
// documented return type; release() transfers ownership into the member
// smart pointer.
BuiltinSkiaCodecImageGenerator::BuiltinSkiaCodecImageGenerator(
    std::unique_ptr<SkCodec> codec)
    : codec_generator_(static_cast<SkCodecImageGenerator*>(
          SkCodecImageGenerator::MakeFromCodec(std::move(codec)).release())) {}

BuiltinSkiaCodecImageGenerator::BuiltinSkiaCodecImageGenerator(
    sk_sp<SkData> buffer)
    : codec_generator_(static_cast<SkCodecImageGenerator*>(
          SkCodecImageGenerator::MakeFromEncodedCodec(buffer).release())) {}

const SkImageInfo& BuiltinSkiaCodecImageGenerator::GetInfo() {
  return codec_generator_->getInfo();
}

unsigned int BuiltinSkiaCodecImageGenerator::GetFrameCount() const {
  return codec_generator_->getFrameCount();
}

// SkCodec reports repetitions (negative meaning "infinite"); the engine's
// contract is play count, hence the +1 for finite values.
unsigned int BuiltinSkiaCodecImageGenerator::GetPlayCount() const {
  auto repetition_count = codec_generator_->getRepetitionCount();
  return repetition_count < 0 ? kInfinitePlayCount : repetition_count + 1;
}

// Translates SkCodec's frame metadata into the engine's FrameInfo,
// mapping kNoFrame to "no required frame".
const ImageGenerator::FrameInfo BuiltinSkiaCodecImageGenerator::GetFrameInfo(
    unsigned int frame_index) const {
  SkCodec::FrameInfo info = {};
  codec_generator_->getFrameInfo(frame_index, &info);
  return {
      .required_frame = info.fRequiredFrame == SkCodec::kNoFrame
                            ? std::nullopt
                            : std::optional<unsigned int>(info.fRequiredFrame),
      .duration = static_cast<unsigned int>(info.fDuration),
      .disposal_method = info.fDisposalMethod};
}

SkISize BuiltinSkiaCodecImageGenerator::GetScaledDimensions(
    float desired_scale) {
  return codec_generator_->getScaledDimensions(desired_scale);
}

// Multi-frame aware: forwards frame_index and (when present) the prior
// frame hint to the codec via SkCodec::Options.
bool BuiltinSkiaCodecImageGenerator::GetPixels(
    const SkImageInfo& info,
    void* pixels,
    size_t row_bytes,
    unsigned int frame_index,
    std::optional<unsigned int> prior_frame) {
  SkCodec::Options options;
  options.fFrameIndex = frame_index;
  if (prior_frame.has_value()) {
    options.fPriorFrame = prior_frame.value();
  }
  return codec_generator_->getPixels(info, pixels, row_bytes, &options);
}

// Factory from raw encoded bytes; returns nullptr when Skia cannot
// construct a codec for the data.
std::unique_ptr<ImageGenerator> BuiltinSkiaCodecImageGenerator::MakeFromData(
    sk_sp<SkData> data) {
  auto codec = SkCodec::MakeFromData(data);
  if (!codec) {
    return nullptr;
  }
  return std::make_unique<BuiltinSkiaCodecImageGenerator>(std::move(codec));
}

}  // namespace flutter
flutter/engine
lib/ui/painting/image_generator.cc
C++
bsd-3-clause
4,577
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/> <meta name="keywords" content="W3C SVG 1.1 Test Suite testsuite mobile"/> <meta name="description" content="W3C SVG 1.2 Tiny Test Suite"/> <title> SVG 1.2 Tiny test:struct-image-08-t </title> <style type="text/css"> <!-- .bodytext { font-family:verdana, helvetica, sans-serif; font-size: 12pt; line-height: 125%; text-align: Left; margin-top: 0; margin-bottom: 0 } .pageTitle { line-height: 150%; font-size: 20pt; font-weight : 900; margin-bottom: 20pt } .pageSubTitle { color : blue; line-height: 100%; font-size: 24pt; font-weight : 900 } .openChapter { color : blue; line-height: 125%; font-weight : 900 } .openSection { color : blue; line-height: 125%; font-weight : 900 } .info { color : black; line-height: 110%; font-size: 10pt; font-weight : 100 } p { margin-top:0; margin-bottom:0; padding-top:0; padding-bottom:0 } blockquote { margin-top:0; margin-bottom:0; padding-top:0; padding-bottom:0 } .opscript {margin-left: 3%; margin-right: 3%; } .opscript p { margin-top: 0.7em} .navbar {background: black; color: white; font-weight: bold} a,a:visited { color: blue } --> </style> </head> <body class="bodytext"> <div class="linkbar"> <p> <a href="struct-image-08-t.html">Tiny version</a></p> <p>Specification link: <a target="spec" href="http://www.w3.org/TR/SVGMobile12/struct.html">5.7 The 'image' element</a></p> <p> <a href="struct-image-07-t.html">struct-image-07-t ←</a> <a href="index.html">index</a> <a href="struct-image-09-t.html">→ struct-image-09-t</a> </p></div> <table align="center" border="0" cellspacing="0" cellpadding="10"> <tr> <td align="center" colspan="3"> <table border="0" cellpadding="8"> <tr> <td align="center" colspan="2" class="pageTitle"> <h1>struct-image-08-t</h1> </td> </tr> <tr 
class="navbar"> <td align="center"> SVG Image </td> <td align="center"> PNG Image </td> </tr> <tr> <td align="right"> <object data="../svggen/struct-image-08-t.svg" width="480" height="360" type="image/svg+xml"><p style="font-size:300%;color:red">FAIL</p></object> </td> <td align="left"> <img alt="raster image of struct-image-08-t" src="../png/struct-image-08-t.png" width="480" height="360"/> </td> </tr> </table> </td> </tr> </table> <div class="opscript"> <p>Tests PNG images with alpha. The result should be identical to the reference image.</p> </div> <div class="linkbar"> <p> <a href="struct-image-07-t.html">struct-image-07-t ←</a> <a href="index.html">index</a> <a href="struct-image-09-t.html">→ struct-image-09-t</a> </p></div> </body> </html>
frivoal/presto-testo
SVG/Testsuites/W3C-Tiny-1_2/tiny-1_2/struct-image-08-t.html
HTML
bsd-3-clause
3,035
# Copyright (c) 2013 David Holm <dholmster@gmail.com> # This file is part of SimpleGUITk - https://github.com/dholm/simpleguitk # See the file 'COPYING' for copying permission. from .plot import plot_lines
dholm/simpleguitk
simpleplot/__init___flymake.py
Python
bsd-3-clause
207
<!DOCTYPE html> <html dir="ltr" lang="en"> <head> <title>Ruby Parser - Rubinius</title> <meta charset="UTF-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"> <meta content='en' http-equiv='content-language'> <meta content='Rubinius is an implementation of the Ruby programming language. The Rubinius bytecode virtual machine is written in C++. The bytecode compiler is written in pure Ruby. The vast majority of the core library is also written in Ruby, with some supporting primitives that interact with the VM directly.' name='description'> <link href='/' rel='home'> <link href='/' rel='start'> <link href='/doc/en/bytecode-compiler' rel='prev' title='Bytecode Compiler'> <link href='/doc/en/bytecode-compiler/ast' rel='next' title='AST'> <!--[if IE]><script src="http://html5shiv.googlecode.com/svn/trunk/html5.js" type="text/javascript"></script><![endif]--> <script src="/javascripts/jquery-1.3.2.js"></script> <script src="/javascripts/paging_keys.js"></script> <script src="/javascripts/application.js"></script> <style>article, aside, dialog, figure, footer, header, hgroup, menu, nav, section { display: block; }</style> <link href="/stylesheets/blueprint/screen.css" media="screen" rel="stylesheet" /> <link href="/stylesheets/application.css" media="screen" rel="stylesheet" /> <link href="/stylesheets/blueprint/print.css" media="print" rel="stylesheet" /> <!--[if IE]><link href="/stylesheets/blueprint/ie.css" media="screen" rel="stylesheet" type="text/css" /><![endif]--> <!--[if IE]><link href="/stylesheets/ie.css" media="screen" rel="stylesheet" type="text/css" /><![endif]--> <link href="/stylesheets/pygments.css" media="screen" rel="stylesheet" /> <link href="/favicon.ico" rel="shortcut icon" type="image/vnd.microsoft.icon" /> <link href="/images/apple-touch-icon.png" rel="apple-touch-icon" type="image/png" /> <link href="/images/apple-touch-icon.png" rel="apple-touch-icon" type="image/png" sizes="72x72" /> <link href="/images/apple-touch-icon.png" 
rel="apple-touch-icon" type="image/png" sizes="114x114" /> </head> <body> <div class='container'> <div class='span-21 doc_menu'> <header> <nav> <ul> <li><a href="/">Home</a></li> <li><a id="blog" href="/blog">Blog</a></li> <li><a id="documentation" href="/doc/en">Documentation</a></li> <li><a href="/projects">Projects</a></li> <li><a href="/roadmap">Roadmap</a></li> <li><a href="/releases">Releases</a></li> </ul> </nav> </header> </div> <div class='span-3 last'> <div id='version'> <a href="/releases/1.2.4">1.2.4</a> </div> </div> </div> <div class="container languages"> <nav> <span class="label">Languages:</span> <ul> <li><a href="/doc/de/bytecode-compiler/parser/" >de</a></li> <li><a href="/doc/en/bytecode-compiler/parser/" class="current" >en</a></li> <li><a href="/doc/es/bytecode-compiler/parser/" >es</a></li> <li><a href="/doc/fr/bytecode-compiler/parser/" >fr</a></li> <li><a href="/doc/it/bytecode-compiler/parser/" >it</a></li> <li><a href="/doc/ja/bytecode-compiler/parser/" >ja</a></li> <li><a href="/doc/pl/bytecode-compiler/parser/" >pl</a></li> <li><a href="/doc/pt-br/bytecode-compiler/parser/" >pt-br</a></li> <li><a href="/doc/ru/bytecode-compiler/parser/" >ru</a></li> </ul> </nav> </div> <div class="container doc_page_nav"> <span class="label">Previous:</span> <a href="/doc/en/bytecode-compiler">Bytecode Compiler</a> <span class="label">Up:</span> <a href="/doc/en/">Table of Contents</a> <span class="label">Next:</span> <a href="/doc/en/bytecode-compiler/ast">AST</a> </div> <div class="container documentation"> <h2>Ruby Parser</h2> <div class="review"> <p>This topic has missing or partial documentation. Please help us improve it.</p> <p> See <a href="/doc/en/how-to/write-documentation">How-To - Write Documentation</a> </p> </div> <p>The first stage in the compilation pipeline is the Ruby Parser. 
The Ruby parser receives either a String of code or a file and passes an AST to the next stage of the process, the generator.</p> <p>The parser itself (called Melbourne) has a C part, which is essentially MRI&rsquo;s parser, and a Ruby part, which is responsible for creating the Ruby AST. The C parser communicates with Ruby by calling a method for each node in the parse tree.</p> <p>Each of these methods has a signature containing all of the information about the part of the parse tree it is processing. For instance, if the underlying Ruby code has an <code>if</code> statement, the C parser will call <code>process_if</code> with the line number, a parameter representing the condition, and parameters representing the body of the if statement and the else section, if any.</p> <pre><code>def process_if(line, cond, body, else_body) AST::If.new line, cond, body, else_body end </code></pre> <p>You can see all of the possible <code>process_</code> calls by taking a look at <code>lib/melbourne/processor.rb</code> in the Rubinius source code.</p> <p>Note that in many cases, the parser passes the result of calling a previous <code>process_</code> method as the arguments to a <code>process_</code> method. In the case of <code>true if 1</code>, the parser first calls <code>process_lit(line 1)</code> and <code>process_true(line)</code>. It also calls <code>process_nil(line)</code>, because the original parse tree contains a <code>nil</code> for the <code>else</code> body. It then calls <code>process_if</code> with the line number, the result of <code>process_lit</code>, the result of <code>process_true</code>, and the result of <code>process_nil</code>.</p> <p>This process recursively builds up a tree structure, which Rubinius passes on to the next stage, the Generator stage.</p> <h2 id="files-referenced">Files Referenced</h2> <ul> <li><em>lib/melbourne/processor.rb</em>: the Ruby interface to the C parser. 
This file contains methods beginning with <code>process_</code>, which the C parser calls for each node in the raw parse tree.</li> <li><em>lib/compiler/ast/*</em>: the definitions for each of the AST nodes used by the melbourne processor.</li> </ul> <h2 id="customization">Customization</h2> <p>There are two ways to customize this stage of the compilation process. The easiest way to customize the creation of the AST is through <a href="/doc/en/bytecode-compiler/transformations/">AST Transforms</a>.</p> <p>You can also subclass the Melbourne processor and define your own handlers for the <code>process_</code> methods. This is an advanced topic that is not yet documented.</p> </div> <div class="container doc_page_nav"> <span class="label">Previous:</span> <a href="/doc/en/bytecode-compiler">Bytecode Compiler</a> <span class="label">Up:</span> <a href="/doc/en/">Table of Contents</a> <span class="label">Next:</span> <a href="/doc/en/bytecode-compiler/ast">AST</a> </div> <div class="container"> <div id="disqus_thread"></div> <script type="text/javascript"> var disqus_shortname = 'rubinius'; var disqus_identifier = '/doc/en/bytecode-compiler/parser/'; var disqus_url = 'http://rubini.us/doc/en/bytecode-compiler/parser/'; (function() { var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true; dsq.src = 'http://' + disqus_shortname + '.disqus.com/embed.js'; (document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq); })(); </script> <noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript> </div> <footer> <div class='container'> <nav> <ul> <li><a rel="external" href="http://twitter.com/rubinius">Follow Rubinius on Twitter</a></li> <li><a rel="external" href="http://github.com/rubinius/rubinius">Fork Rubinius on github</a></li> <li><a rel="external" href="http://engineyard.com">An Engine Yard project</a></li> </ul> </nav> 
</div> </footer> <script> var _gaq=[['_setAccount','UA-12328521-1'],['_trackPageview']]; (function(d,t){var g=d.createElement(t),s=d.getElementsByTagName(t)[0];g.async=1; g.src=('https:'==location.protocol?'//ssl':'//www')+'.google-analytics.com/ga.js'; s.parentNode.insertBefore(g,s)}(document,'script')); </script> </body> </html>
yob/debian-rubinius
web/_site/doc/en/bytecode-compiler/parser/index.html
HTML
bsd-3-clause
8,883
/**
 * This file is part of the CernVM File System.
 */

#include <gtest/gtest.h>

#include "util/pointer.h"
#include "util/raii_temp_dir.h"

#include "platform.h"
#include "util/posix.h"

// Returns true iff `dir` exists, i.e. platform_stat() succeeds on the path.
static bool DirExists(const std::string& dir) {
  platform_stat64 dir_stat;
  int ret = platform_stat(dir.c_str(), &dir_stat);
  return ret == 0;
}

// Test fixture for RaiiTempDir; no shared state is needed.
class T_RaiiTempDir : public ::testing::Test {};

// Destroying the RaiiTempDir (via UniquePtr::Destroy) must remove the
// temporary directory it created.
TEST_F(T_RaiiTempDir, Basic) {
  UniquePtr<RaiiTempDir> temp_dir(
      RaiiTempDir::Create(GetCurrentWorkingDirectory() + "/test_dir"));
  ASSERT_TRUE(temp_dir.IsValid());
  const std::string temp_path = temp_dir->dir();
  temp_dir.Destroy();
  ASSERT_FALSE(DirExists(temp_path));
}

// If the directory disappears behind RaiiTempDir's back, cleanup must
// still leave the path absent (and, implicitly, not crash).
TEST_F(T_RaiiTempDir, DeletedExternally) {
  UniquePtr<RaiiTempDir> temp_dir(
      RaiiTempDir::Create(GetCurrentWorkingDirectory() + "/test_dir"));
  ASSERT_TRUE(temp_dir.IsValid());
  const std::string temp_path = temp_dir->dir();
  RemoveTree(temp_path);
  ASSERT_FALSE(DirExists(temp_path));
}
Gangbiao/cvmfs
test/unittests/t_raii_temp_dir.cc
C++
bsd-3-clause
978
SUBROUTINE STIMRQ( LINE, NM, MVAL, NVAL, NK, KVAL, NNB, NBVAL, $ NXVAL, NLDA, LDAVAL, TIMMIN, A, TAU, B, WORK, $ RESLTS, LDR1, LDR2, LDR3, NOUT ) * * -- LAPACK timing routine (version 3.0) -- * Univ. of Tennessee, Univ. of California Berkeley, NAG Ltd., * Courant Institute, Argonne National Lab, and Rice University * March 31, 1993 * * .. Scalar Arguments .. CHARACTER*80 LINE INTEGER LDR1, LDR2, LDR3, NK, NLDA, NM, NNB, NOUT REAL TIMMIN * .. * .. Array Arguments .. INTEGER KVAL( * ), LDAVAL( * ), MVAL( * ), NBVAL( * ), $ NVAL( * ), NXVAL( * ) REAL A( * ), B( * ), RESLTS( LDR1, LDR2, LDR3, * ), $ TAU( * ), WORK( * ) * .. * * Purpose * ======= * * STIMRQ times the LAPACK routines to perform the RQ factorization of * a REAL general matrix. * * Arguments * ========= * * LINE (input) CHARACTER*80 * The input line that requested this routine. The first six * characters contain either the name of a subroutine or a * generic path name. The remaining characters may be used to * specify the individual routines to be timed. See ATIMIN for * a full description of the format of the input line. * * NM (input) INTEGER * The number of values of M and N contained in the vectors * MVAL and NVAL. The matrix sizes are used in pairs (M,N). * * MVAL (input) INTEGER array, dimension (NM) * The values of the matrix row dimension M. * * NVAL (input) INTEGER array, dimension (NM) * The values of the matrix column dimension N. * * NK (input) INTEGER * The number of values of K in the vector KVAL. * * KVAL (input) INTEGER array, dimension (NK) * The values of the matrix dimension K, used in SORMRQ. * * NNB (input) INTEGER * The number of values of NB and NX contained in the * vectors NBVAL and NXVAL. The blocking parameters are used * in pairs (NB,NX). * * NBVAL (input) INTEGER array, dimension (NNB) * The values of the blocksize NB. * * NXVAL (input) INTEGER array, dimension (NNB) * The values of the crossover point NX. 
* * NLDA (input) INTEGER * The number of values of LDA contained in the vector LDAVAL. * * LDAVAL (input) INTEGER array, dimension (NLDA) * The values of the leading dimension of the array A. * * TIMMIN (input) REAL * The minimum time a subroutine will be timed. * * A (workspace) REAL array, dimension (LDAMAX*NMAX) * where LDAMAX and NMAX are the maximum values of LDA and N. * * TAU (workspace) REAL array, dimension (min(M,N)) * * B (workspace) REAL array, dimension (LDAMAX*NMAX) * * WORK (workspace) REAL array, dimension (LDAMAX*NBMAX) * where NBMAX is the maximum value of NB. * * RESLTS (workspace) REAL array, dimension * (LDR1,LDR2,LDR3,2*NK) * The timing results for each subroutine over the relevant * values of (M,N), (NB,NX), and LDA. * * LDR1 (input) INTEGER * The first dimension of RESLTS. LDR1 >= max(1,NNB). * * LDR2 (input) INTEGER * The second dimension of RESLTS. LDR2 >= max(1,NM). * * LDR3 (input) INTEGER * The third dimension of RESLTS. LDR3 >= max(1,NLDA). * * NOUT (input) INTEGER * The unit number for output. * * Internal Parameters * =================== * * MODE INTEGER * The matrix type. MODE = 3 is a geometric distribution of * eigenvalues. See SLATMS for further details. * * COND REAL * The condition number of the matrix. The singular values are * set to values from DMAX to DMAX/COND. * * DMAX REAL * The magnitude of the largest singular value. * * ===================================================================== * * .. Parameters .. INTEGER NSUBS PARAMETER ( NSUBS = 3 ) INTEGER MODE REAL COND, DMAX PARAMETER ( MODE = 3, COND = 100.0E0, DMAX = 1.0E0 ) * .. * .. Local Scalars .. CHARACTER LABM, SIDE, TRANS CHARACTER*3 PATH CHARACTER*6 CNAME INTEGER I, I4, IC, ICL, IK, ILDA, IM, IMX, INB, INFO, $ ISIDE, ISUB, ITOFF, ITRAN, K, K1, LDA, LW, M, $ M1, MINMN, N, N1, NB, NX REAL OPS, S1, S2, TIME, UNTIME * .. * .. Local Arrays .. 
LOGICAL TIMSUB( NSUBS ) CHARACTER SIDES( 2 ), TRANSS( 2 ) CHARACTER*6 SUBNAM( NSUBS ) INTEGER ISEED( 4 ), MUSE( 12 ), NUSE( 12 ), RESEED( 4 ) * .. * .. External Functions .. REAL SECOND, SMFLOP, SOPLA EXTERNAL SECOND, SMFLOP, SOPLA * .. * .. External Subroutines .. EXTERNAL ATIMCK, ATIMIN, ICOPY, SGERQF, SLACPY, SLATMS, $ SORGRQ, SORMRQ, SPRTB4, SPRTB5, STIMMG, XLAENV * .. * .. Intrinsic Functions .. INTRINSIC MAX, MIN, REAL * .. * .. Data statements .. DATA SUBNAM / 'SGERQF', 'SORGRQ', 'SORMRQ' / DATA SIDES / 'L', 'R' / , TRANSS / 'N', 'T' / DATA ISEED / 0, 0, 0, 1 / * .. * .. Executable Statements .. * * Extract the timing request from the input line. * PATH( 1: 1 ) = 'Single precision' PATH( 2: 3 ) = 'RQ' CALL ATIMIN( PATH, LINE, NSUBS, SUBNAM, TIMSUB, NOUT, INFO ) IF( INFO.NE.0 ) $ GO TO 230 * * Check that M <= LDA for the input values. * CNAME = LINE( 1: 6 ) CALL ATIMCK( 1, CNAME, NM, MVAL, NLDA, LDAVAL, NOUT, INFO ) IF( INFO.GT.0 ) THEN WRITE( NOUT, FMT = 9999 )CNAME GO TO 230 END IF * * Do for each pair of values (M,N): * DO 70 IM = 1, NM M = MVAL( IM ) N = NVAL( IM ) MINMN = MIN( M, N ) CALL ICOPY( 4, ISEED, 1, RESEED, 1 ) * * Do for each value of LDA: * DO 60 ILDA = 1, NLDA LDA = LDAVAL( ILDA ) * * Do for each pair of values (NB, NX) in NBVAL and NXVAL. * DO 50 INB = 1, NNB NB = NBVAL( INB ) CALL XLAENV( 1, NB ) NX = NXVAL( INB ) CALL XLAENV( 3, NX ) LW = MAX( 1, M*MAX( 1, NB ) ) * * Generate a test matrix of size M by N. * CALL ICOPY( 4, RESEED, 1, ISEED, 1 ) CALL SLATMS( M, N, 'Uniform', ISEED, 'Nonsymm', TAU, $ MODE, COND, DMAX, M, N, 'No packing', B, $ LDA, WORK, INFO ) * IF( TIMSUB( 1 ) ) THEN * * SGERQF: RQ factorization * CALL SLACPY( 'Full', M, N, B, LDA, A, LDA ) IC = 0 S1 = SECOND( ) 10 CONTINUE CALL SGERQF( M, N, A, LDA, TAU, WORK, LW, INFO ) S2 = SECOND( ) TIME = S2 - S1 IC = IC + 1 IF( TIME.LT.TIMMIN ) THEN CALL SLACPY( 'Full', M, N, B, LDA, A, LDA ) GO TO 10 END IF * * Subtract the time used in SLACPY. 
* ICL = 1 S1 = SECOND( ) 20 CONTINUE S2 = SECOND( ) UNTIME = S2 - S1 ICL = ICL + 1 IF( ICL.LE.IC ) THEN CALL SLACPY( 'Full', M, N, A, LDA, B, LDA ) GO TO 20 END IF * TIME = ( TIME-UNTIME ) / REAL( IC ) OPS = SOPLA( 'SGERQF', M, N, 0, 0, NB ) RESLTS( INB, IM, ILDA, 1 ) = SMFLOP( OPS, TIME, INFO ) ELSE * * If SGERQF was not timed, generate a matrix and factor * it using SGERQF anyway so that the factored form of * the matrix can be used in timing the other routines. * CALL SLACPY( 'Full', M, N, B, LDA, A, LDA ) CALL SGERQF( M, N, A, LDA, TAU, WORK, LW, INFO ) END IF * IF( TIMSUB( 2 ) ) THEN * * SORGRQ: Generate orthogonal matrix Q from the RQ * factorization * CALL SLACPY( 'Full', MINMN, N, A, LDA, B, LDA ) IC = 0 S1 = SECOND( ) 30 CONTINUE CALL SORGRQ( MINMN, N, MINMN, B, LDA, TAU, WORK, LW, $ INFO ) S2 = SECOND( ) TIME = S2 - S1 IC = IC + 1 IF( TIME.LT.TIMMIN ) THEN CALL SLACPY( 'Full', MINMN, N, A, LDA, B, LDA ) GO TO 30 END IF * * Subtract the time used in SLACPY. * ICL = 1 S1 = SECOND( ) 40 CONTINUE S2 = SECOND( ) UNTIME = S2 - S1 ICL = ICL + 1 IF( ICL.LE.IC ) THEN CALL SLACPY( 'Full', MINMN, N, A, LDA, B, LDA ) GO TO 40 END IF * TIME = ( TIME-UNTIME ) / REAL( IC ) OPS = SOPLA( 'SORGRQ', MINMN, N, MINMN, 0, NB ) RESLTS( INB, IM, ILDA, 2 ) = SMFLOP( OPS, TIME, INFO ) END IF * 50 CONTINUE 60 CONTINUE 70 CONTINUE * * Print tables of results * DO 90 ISUB = 1, NSUBS - 1 IF( .NOT.TIMSUB( ISUB ) ) $ GO TO 90 WRITE( NOUT, FMT = 9998 )SUBNAM( ISUB ) IF( NLDA.GT.1 ) THEN DO 80 I = 1, NLDA WRITE( NOUT, FMT = 9997 )I, LDAVAL( I ) 80 CONTINUE END IF WRITE( NOUT, FMT = * ) IF( ISUB.EQ.2 ) $ WRITE( NOUT, FMT = 9996 ) CALL SPRTB4( '( NB, NX)', 'M', 'N', NNB, NBVAL, NXVAL, NM, $ MVAL, NVAL, NLDA, RESLTS( 1, 1, 1, ISUB ), LDR1, $ LDR2, NOUT ) 90 CONTINUE * * Time SORMRQ separately. Here the starting matrix is M by N, and * K is the free dimension of the matrix multiplied by Q. * IF( TIMSUB( 3 ) ) THEN * * Check that K <= LDA for the input values. 
* CALL ATIMCK( 3, CNAME, NK, KVAL, NLDA, LDAVAL, NOUT, INFO ) IF( INFO.GT.0 ) THEN WRITE( NOUT, FMT = 9999 )SUBNAM( 3 ) GO TO 230 END IF * * Use only the pairs (M,N) where M <= N. * IMX = 0 DO 100 IM = 1, NM IF( MVAL( IM ).LE.NVAL( IM ) ) THEN IMX = IMX + 1 MUSE( IMX ) = MVAL( IM ) NUSE( IMX ) = NVAL( IM ) END IF 100 CONTINUE * * SORMRQ: Multiply by Q stored as a product of elementary * transformations * * Do for each pair of values (M,N): * DO 180 IM = 1, IMX M = MUSE( IM ) N = NUSE( IM ) * * Do for each value of LDA: * DO 170 ILDA = 1, NLDA LDA = LDAVAL( ILDA ) * * Generate an M by N matrix and form its RQ decomposition. * CALL SLATMS( M, N, 'Uniform', ISEED, 'Nonsymm', TAU, $ MODE, COND, DMAX, M, N, 'No packing', A, $ LDA, WORK, INFO ) LW = MAX( 1, M*MAX( 1, NB ) ) CALL SGERQF( M, N, A, LDA, TAU, WORK, LW, INFO ) * * Do first for SIDE = 'L', then for SIDE = 'R' * I4 = 0 DO 160 ISIDE = 1, 2 SIDE = SIDES( ISIDE ) * * Do for each pair of values (NB, NX) in NBVAL and * NXVAL. * DO 150 INB = 1, NNB NB = NBVAL( INB ) CALL XLAENV( 1, NB ) NX = NXVAL( INB ) CALL XLAENV( 3, NX ) * * Do for each value of K in KVAL * DO 140 IK = 1, NK K = KVAL( IK ) * * Sort out which variable is which * IF( ISIDE.EQ.1 ) THEN K1 = M M1 = N N1 = K LW = MAX( 1, N1*MAX( 1, NB ) ) ELSE K1 = M N1 = N M1 = K LW = MAX( 1, M1*MAX( 1, NB ) ) END IF * * Do first for TRANS = 'N', then for TRANS = 'T' * ITOFF = 0 DO 130 ITRAN = 1, 2 TRANS = TRANSS( ITRAN ) CALL STIMMG( 0, M1, N1, B, LDA, 0, 0 ) IC = 0 S1 = SECOND( ) 110 CONTINUE CALL SORMRQ( SIDE, TRANS, M1, N1, K1, A, LDA, $ TAU, B, LDA, WORK, LW, INFO ) S2 = SECOND( ) TIME = S2 - S1 IC = IC + 1 IF( TIME.LT.TIMMIN ) THEN CALL STIMMG( 0, M1, N1, B, LDA, 0, 0 ) GO TO 110 END IF * * Subtract the time used in STIMMG. 
* ICL = 1 S1 = SECOND( ) 120 CONTINUE S2 = SECOND( ) UNTIME = S2 - S1 ICL = ICL + 1 IF( ICL.LE.IC ) THEN CALL STIMMG( 0, M1, N1, B, LDA, 0, 0 ) GO TO 120 END IF * TIME = ( TIME-UNTIME ) / REAL( IC ) OPS = SOPLA( 'SORMRQ', M1, N1, K1, ISIDE-1, $ NB ) RESLTS( INB, IM, ILDA, $ I4+ITOFF+IK ) = SMFLOP( OPS, TIME, INFO ) ITOFF = NK 130 CONTINUE 140 CONTINUE 150 CONTINUE I4 = 2*NK 160 CONTINUE 170 CONTINUE 180 CONTINUE * * Print tables of results * ISUB = 3 I4 = 1 IF( IMX.GE.1 ) THEN DO 220 ISIDE = 1, 2 SIDE = SIDES( ISIDE ) IF( ISIDE.EQ.1 ) THEN WRITE( NOUT, FMT = 9998 )SUBNAM( ISUB ) IF( NLDA.GT.1 ) THEN DO 190 I = 1, NLDA WRITE( NOUT, FMT = 9997 )I, LDAVAL( I ) 190 CONTINUE END IF END IF DO 210 ITRAN = 1, 2 TRANS = TRANSS( ITRAN ) DO 200 IK = 1, NK IF( ISIDE.EQ.1 ) THEN N = KVAL( IK ) WRITE( NOUT, FMT = 9995 )SUBNAM( ISUB ), SIDE, $ TRANS, 'N', N LABM = 'M' ELSE M = KVAL( IK ) WRITE( NOUT, FMT = 9995 )SUBNAM( ISUB ), SIDE, $ TRANS, 'M', M LABM = 'N' END IF CALL SPRTB5( 'NB', 'K', LABM, NNB, NBVAL, IMX, $ MUSE, NUSE, NLDA, $ RESLTS( 1, 1, 1, I4 ), LDR1, LDR2, $ NOUT ) I4 = I4 + 1 200 CONTINUE 210 CONTINUE 220 CONTINUE ELSE WRITE( NOUT, FMT = 9994 )SUBNAM( ISUB ) END IF END IF 230 CONTINUE 9999 FORMAT( 1X, A6, ' timing run not attempted', / ) 9998 FORMAT( / ' *** Speed of ', A6, ' in megaflops ***' ) 9997 FORMAT( 5X, 'line ', I2, ' with LDA = ', I5 ) 9996 FORMAT( 5X, 'K = min(M,N)', / ) 9995 FORMAT( / 5X, A6, ' with SIDE = ''', A1, ''', TRANS = ''', A1, $ ''', ', A1, ' =', I6, / ) 9994 FORMAT( ' *** No pairs (M,N) found with M <= N: ', A6, $ ' not timed' ) RETURN * * End of STIMRQ * END
yaowee/libflame
lapack-test/lapack-timing/LIN/stimrq.f
FORTRAN
bsd-3-clause
16,666
// =============================================================================
// PROJECT CHRONO - http://projectchrono.org
//
// Copyright (c) 2014 projectchrono.org
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be found
// in the LICENSE file at the top level of the distribution and at
// http://projectchrono.org/license-chrono.txt.
//
// =============================================================================
// Authors: Radu Serban
// =============================================================================
//
// Tracked vehicle double-pin sprocket model constructed with data from file
// (JSON format).
//
// =============================================================================

#ifndef SPROCKET_DOUBLE_PIN_H
#define SPROCKET_DOUBLE_PIN_H

#include "chrono_vehicle/ChApiVehicle.h"
#include "chrono_vehicle/tracked_vehicle/sprocket/ChSprocketDoublePin.h"

#include "chrono_thirdparty/rapidjson/document.h"

namespace chrono {
namespace vehicle {

/// @addtogroup vehicle_tracked_sprocket
/// @{

/// Tracked vehicle double-pin sprocket model constructed with data from file (JSON format).
class CH_VEHICLE_API SprocketDoublePin : public ChSprocketDoublePin {
  public:
    SprocketDoublePin(const std::string& filename);
    SprocketDoublePin(const rapidjson::Document& d);
    ~SprocketDoublePin() {}

    /// Get the number of teeth of the gear.
    virtual int GetNumTeeth() const override { return m_num_teeth; }

    /// Get the radius of the gear.
    /// This quantity is used during the automatic track assembly.
    virtual double GetAssemblyRadius() const override { return m_gear_RA; }

    /// Return the mass of the gear body.
    virtual double GetGearMass() const override { return m_gear_mass; }

    /// Return the moments of inertia of the gear body.
    virtual const ChVector<>& GetGearInertia() override { return m_gear_inertia; }

    /// Return the inertia of the axle shaft.
    virtual double GetAxleInertia() const override { return m_axle_inertia; }

    /// Return the distance between the two gear profiles.
    virtual double GetSeparation() const override { return m_separation; }

    /// Return the radius of the addendum circle.
    virtual double GetOuterRadius() const override { return m_gear_RT; }

    /// Return the radius of the (concave) tooth circular arc.
    virtual double GetArcRadius() const override { return m_gear_R; }

    /// Return height of arc center.
    virtual double GetArcCenterHeight() const override { return m_gear_C; }

    /// Return offset of arc center.
    virtual double GetArcCenterOffset() const override { return m_gear_W; }

    /// Return the allowed backlash (play) before lateral contact with track shoes is enabled (to prevent detracking).
    virtual double GetLateralBacklash() const override { return m_lateral_backlash; }

  private:
    /// Populate this sprocket from the parsed JSON document.
    virtual void Create(const rapidjson::Document& d) override;

    /// Create the contact material consistent with the specified contact method.
    virtual void CreateContactMaterial(ChContactMethod contact_method) override;

    /// Add visualization of the sprocket.
    virtual void AddVisualizationAssets(VisualizationType vis) override;

    // All members below are read from the JSON document in Create() and
    // served back to the base class through the getters above.
    int m_num_teeth;                ///< number of gear teeth

    double m_gear_mass;             ///< gear body mass
    ChVector<> m_gear_inertia;      ///< gear body moments of inertia
    double m_axle_inertia;          ///< axle shaft inertia

    double m_separation;            ///< distance between the two gear profiles

    double m_gear_RT;               ///< addendum circle radius
    double m_gear_R;                ///< (concave) tooth arc radius
    double m_gear_RA;               ///< assembly radius
    double m_gear_C;                ///< arc center height
    double m_gear_W;                ///< arc center offset

    double m_lateral_backlash;      ///< allowed lateral play before contact

    bool m_has_mesh;                ///< whether a visualization mesh was specified
    std::string m_meshFile;         ///< visualization mesh file name

    MaterialInfo m_mat_info;        ///< contact material specification
};

/// @} vehicle_tracked_sprocket

}  // end namespace vehicle
}  // end namespace chrono

#endif
projectchrono/chrono
src/chrono_vehicle/tracked_vehicle/sprocket/SprocketDoublePin.h
C
bsd-3-clause
3,688
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2014 Garrett D'Amore <garrett@damore.org>
 *
 * Copyright 2008 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

/* Message catalog (gettext-family) interfaces. */

#ifndef _LIBINTL_H
#define _LIBINTL_H

#include <sys/isa_defs.h>

#ifdef __cplusplus
extern "C" {
#endif

/*
 * wchar_t is a built-in type in standard C++ and as such is not
 * defined here when using standard C++. However, the GNU compiler
 * fixincludes utility nonetheless creates its own version of this
 * header for use by gcc and g++. In that version it adds a redundant
 * guard for __cplusplus. To avoid the creation of a gcc/g++ specific
 * header we need to include the following magic comment:
 *
 * we must use the C++ compiler's type
 *
 * The above comment should not be removed or changed until GNU
 * gcc/fixinc/inclhack.def is updated to bypass this header.
 */
#if !defined(__cplusplus) || (__cplusplus < 199711L && !defined(__GNUG__))
#ifndef _WCHAR_T
#define _WCHAR_T
#if defined(_LP64)
typedef int wchar_t;
#else
typedef long wchar_t;
#endif
#endif  /* !_WCHAR_T */
#endif  /* !defined(__cplusplus) ... */

/* Maximum length of a text domain name. */
#define TEXTDOMAINMAX 256

/* Only GNU .mo file revisions 0 and 1 are supported (-1 otherwise). */
#define __GNU_GETTEXT_SUPPORTED_REVISION(m) \
 ((((m) == 0) || ((m) == 1)) ? 1 : -1)

/* Classic gettext message-lookup and domain-binding interfaces. */
extern char *dcgettext(const char *, const char *, const int);
extern char *dgettext(const char *, const char *);
extern char *gettext(const char *);
extern char *textdomain(const char *);
extern char *bindtextdomain(const char *, const char *);

/*
 * LI18NUX 2000 Globalization Specification Version 1.0
 * with Amendment 2
 */
/* Plural-aware lookups and codeset binding. */
extern char *dcngettext(const char *, const char *,
 const char *, unsigned long int, int);
extern char *dngettext(const char *, const char *,
 const char *, unsigned long int);
extern char *ngettext(const char *, const char *, unsigned long int);
extern char *bind_textdomain_codeset(const char *, const char *);

/* Word handling functions --- requires dynamic linking */
/* Warning: these are experimental and subject to change. */
extern int wdinit(void);
extern int wdchkind(wchar_t);
extern int wdbindf(wchar_t, wchar_t, int);
extern wchar_t *wddelim(wchar_t, wchar_t, int);
extern wchar_t mcfiller(void);
extern int mcwrap(void);

#ifdef __cplusplus
}
#endif

#endif /* _LIBINTL_H */
TigerBSD/TigerBSD
FreeBSD/cddl/contrib/opensolaris/head/libintl.h
C
isc
3,047
var path = require('path'); var Supervisor = require(path.join(__dirname,'/../processes/supervisor')); /** * @function stopGateway * @description Halts all pocesses. */ function stopGateway() { return new Supervisor().stop(); } module.exports = stopGateway;
zealord/gatewayd
lib/api/stop_gateway.js
JavaScript
isc
272
/** @file GUIDs used for SAL system table entries in the EFI system table. SAL System Table contains Itanium-based processor centric information about the system. Copyright (c) 2006 - 2009, Intel Corporation. All rights reserved.<BR> This program and the accompanying materials are licensed and made available under the terms and conditions of the BSD License which accompanies this distribution. The full text of the license may be found at http://opensource.org/licenses/bsd-license.php THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. @par Revision Reference: GUIDs defined in UEFI 2.0 spec. **/ #ifndef __SAL_SYSTEM_TABLE_GUID_H__ #define __SAL_SYSTEM_TABLE_GUID_H__ #define SAL_SYSTEM_TABLE_GUID \ { \ 0xeb9d2d32, 0x2d88, 0x11d3, {0x9a, 0x16, 0x0, 0x90, 0x27, 0x3f, 0xc1, 0x4d } \ } extern EFI_GUID gEfiSalSystemTableGuid; #endif
jjingram/schminke
inc/edk2/MdePkg/Include/Guid/SalSystemTable.h
C
mit
1,128
"""This file contains code for use with "Think Stats" and "Think Bayes", both by Allen B. Downey, available from greenteapress.com Copyright 2014 Allen B. Downey License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html """ from __future__ import print_function, division """This file contains class definitions for: Hist: represents a histogram (map from values to integer frequencies). Pmf: represents a probability mass function (map from values to probs). _DictWrapper: private parent class for Hist and Pmf. Cdf: represents a discrete cumulative distribution function Pdf: represents a continuous probability density function """ import bisect import copy import logging import math import random import re from collections import Counter from operator import itemgetter import thinkplot import numpy as np import pandas import scipy from scipy import stats from scipy import special from scipy import ndimage from scipy.special import gamma from io import open ROOT2 = math.sqrt(2) def RandomSeed(x): """Initialize the random and np.random generators. x: int seed """ random.seed(x) np.random.seed(x) def Odds(p): """Computes odds for a given probability. Example: p=0.75 means 75 for and 25 against, or 3:1 odds in favor. Note: when p=1, the formula for odds divides by zero, which is normally undefined. But I think it is reasonable to define Odds(1) to be infinity, so that's what this function does. p: float 0-1 Returns: float odds """ if p == 1: return float('inf') return p / (1 - p) def Probability(o): """Computes the probability corresponding to given odds. Example: o=2 means 2:1 odds in favor, or 2/3 probability o: float odds, strictly positive Returns: float probability """ return o / (o + 1) def Probability2(yes, no): """Computes the probability corresponding to given odds. Example: yes=2, no=1 means 2:1 odds in favor, or 2/3 probability. 
yes, no: int or float odds in favor """ return yes / (yes + no) class Interpolator(object): """Represents a mapping between sorted sequences; performs linear interp. Attributes: xs: sorted list ys: sorted list """ def __init__(self, xs, ys): self.xs = xs self.ys = ys def Lookup(self, x): """Looks up x and returns the corresponding value of y.""" return self._Bisect(x, self.xs, self.ys) def Reverse(self, y): """Looks up y and returns the corresponding value of x.""" return self._Bisect(y, self.ys, self.xs) def _Bisect(self, x, xs, ys): """Helper function.""" if x <= xs[0]: return ys[0] if x >= xs[-1]: return ys[-1] i = bisect.bisect(xs, x) frac = 1.0 * (x - xs[i - 1]) / (xs[i] - xs[i - 1]) y = ys[i - 1] + frac * 1.0 * (ys[i] - ys[i - 1]) return y # When we plot Hist, Pmf and Cdf objects, they don't appear in # the legend unless we override the default label. DEFAULT_LABEL = '_nolegend_' class _DictWrapper(object): """An object that contains a dictionary.""" def __init__(self, obj=None, label=None): """Initializes the distribution. 
obj: Hist, Pmf, Cdf, Pdf, dict, pandas Series, list of pairs label: string label """ self.label = label if label is not None else DEFAULT_LABEL self.d = {} # flag whether the distribution is under a log transform self.log = False if obj is None: return if isinstance(obj, (_DictWrapper, Cdf, Pdf)): self.label = label if label is not None else obj.label if isinstance(obj, dict): self.d.update(obj.items()) elif isinstance(obj, (_DictWrapper, Cdf, Pdf)): self.d.update(obj.Items()) elif isinstance(obj, pandas.Series): self.d.update(obj.value_counts().iteritems()) else: # finally, treat it like a list self.d.update(Counter(obj)) if len(self) > 0 and isinstance(self, Pmf): self.Normalize() def __hash__(self): return id(self) def __str__(self): cls = self.__class__.__name__ if self.label == DEFAULT_LABEL: return '%s(%s)' % (cls, str(self.d)) else: return self.label def __repr__(self): cls = self.__class__.__name__ if self.label == DEFAULT_LABEL: return '%s(%s)' % (cls, repr(self.d)) else: return '%s(%s, %s)' % (cls, repr(self.d), repr(self.label)) def __eq__(self, other): try: return self.d == other.d except AttributeError: return False def __len__(self): return len(self.d) def __iter__(self): return iter(self.d) def iterkeys(self): """Returns an iterator over keys.""" return iter(self.d) def __contains__(self, value): return value in self.d def __getitem__(self, value): return self.d.get(value, 0) def __setitem__(self, value, prob): self.d[value] = prob def __delitem__(self, value): del self.d[value] def Copy(self, label=None): """Returns a copy. Make a shallow copy of d. If you want a deep copy of d, use copy.deepcopy on the whole object. label: string label for the new Hist returns: new _DictWrapper with the same type """ new = copy.copy(self) new.d = copy.copy(self.d) new.label = label if label is not None else self.label return new def Scale(self, factor): """Multiplies the values by a factor. 
factor: what to multiply by Returns: new object """ new = self.Copy() new.d.clear() for val, prob in self.Items(): new.Set(val * factor, prob) return new def Log(self, m=None): """Log transforms the probabilities. Removes values with probability 0. Normalizes so that the largest logprob is 0. """ if self.log: raise ValueError("Pmf/Hist already under a log transform") self.log = True if m is None: m = self.MaxLike() for x, p in self.d.items(): if p: self.Set(x, math.log(p / m)) else: self.Remove(x) def Exp(self, m=None): """Exponentiates the probabilities. m: how much to shift the ps before exponentiating If m is None, normalizes so that the largest prob is 1. """ if not self.log: raise ValueError("Pmf/Hist not under a log transform") self.log = False if m is None: m = self.MaxLike() for x, p in self.d.items(): self.Set(x, math.exp(p - m)) def GetDict(self): """Gets the dictionary.""" return self.d def SetDict(self, d): """Sets the dictionary.""" self.d = d def Values(self): """Gets an unsorted sequence of values. Note: one source of confusion is that the keys of this dictionary are the values of the Hist/Pmf, and the values of the dictionary are frequencies/probabilities. """ return self.d.keys() def Items(self): """Gets an unsorted sequence of (value, freq/prob) pairs.""" return self.d.items() def SortedItems(self): """Gets a sorted sequence of (value, freq/prob) pairs. It items are unsortable, the result is unsorted. """ def isnan(x): try: return math.isnan(x) except TypeError: return False if any([isnan(x) for x in self.Values()]): msg = 'Keys contain NaN, may not sort correctly.' logging.warning(msg) try: return sorted(self.d.items()) except TypeError: return self.d.items() def Render(self, **options): """Generates a sequence of points suitable for plotting. 
Note: options are ignored Returns: tuple of (sorted value sequence, freq/prob sequence) """ return zip(*self.SortedItems()) def MakeCdf(self, label=None): """Makes a Cdf.""" label = label if label is not None else self.label return Cdf(self, label=label) def Print(self): """Prints the values and freqs/probs in ascending order.""" for val, prob in self.SortedItems(): print(val, prob) def Set(self, x, y=0): """Sets the freq/prob associated with the value x. Args: x: number value y: number freq or prob """ self.d[x] = y def Incr(self, x, term=1): """Increments the freq/prob associated with the value x. Args: x: number value term: how much to increment by """ self.d[x] = self.d.get(x, 0) + term def Mult(self, x, factor): """Scales the freq/prob associated with the value x. Args: x: number value factor: how much to multiply by """ self.d[x] = self.d.get(x, 0) * factor def Remove(self, x): """Removes a value. Throws an exception if the value is not there. Args: x: value to remove """ del self.d[x] def Total(self): """Returns the total of the frequencies/probabilities in the map.""" total = sum(self.d.values()) return total def MaxLike(self): """Returns the largest frequency/probability in the map.""" return max(self.d.values()) def Largest(self, n=10): """Returns the largest n values, with frequency/probability. n: number of items to return """ return sorted(self.d.items(), reverse=True)[:n] def Smallest(self, n=10): """Returns the smallest n values, with frequency/probability. n: number of items to return """ return sorted(self.d.items(), reverse=False)[:n] class Hist(_DictWrapper): """Represents a histogram, which is a map from values to frequencies. Values can be any hashable type; frequencies are integer counters. """ def Freq(self, x): """Gets the frequency associated with the value x. 
Args: x: number value Returns: int frequency """ return self.d.get(x, 0) def Freqs(self, xs): """Gets frequencies for a sequence of values.""" return [self.Freq(x) for x in xs] def IsSubset(self, other): """Checks whether the values in this histogram are a subset of the values in the given histogram.""" for val, freq in self.Items(): if freq > other.Freq(val): return False return True def Subtract(self, other): """Subtracts the values in the given histogram from this histogram.""" for val, freq in other.Items(): self.Incr(val, -freq) class Pmf(_DictWrapper): """Represents a probability mass function. Values can be any hashable type; probabilities are floating-point. Pmfs are not necessarily normalized. """ def Prob(self, x, default=0): """Gets the probability associated with the value x. Args: x: number value default: value to return if the key is not there Returns: float probability """ return self.d.get(x, default) def Probs(self, xs): """Gets probabilities for a sequence of values.""" return [self.Prob(x) for x in xs] def Percentile(self, percentage): """Computes a percentile of a given Pmf. Note: this is not super efficient. If you are planning to compute more than a few percentiles, compute the Cdf. percentage: float 0-100 returns: value from the Pmf """ p = percentage / 100 total = 0 for val, prob in sorted(self.Items()): total += prob if total >= p: return val def ProbGreater(self, x): """Probability that a sample from this Pmf exceeds x. x: number returns: float probability """ if isinstance(x, _DictWrapper): return PmfProbGreater(self, x) else: t = [prob for (val, prob) in self.d.items() if val > x] return sum(t) def ProbLess(self, x): """Probability that a sample from this Pmf is less than x. x: number returns: float probability """ if isinstance(x, _DictWrapper): return PmfProbLess(self, x) else: t = [prob for (val, prob) in self.d.items() if val < x] return sum(t) def ProbEqual(self, x): """Probability that a sample from this Pmf is exactly x. 
x: number returns: float probability """ if isinstance(x, _DictWrapper): return PmfProbEqual(self, x) else: return self[x] # NOTE: I've decided to remove the magic comparators because they # have the side-effect of making Pmf sortable, but in fact they # don't support sorting. def Normalize(self, fraction=1): """Normalizes this PMF so the sum of all probs is fraction. Args: fraction: what the total should be after normalization Returns: the total probability before normalizing """ if self.log: raise ValueError("Normalize: Pmf is under a log transform") total = self.Total() if total == 0: raise ValueError('Normalize: total probability is zero.') factor = fraction / total for x in self.d: self.d[x] *= factor return total def Random(self): """Chooses a random element from this PMF. Note: this is not very efficient. If you plan to call this more than a few times, consider converting to a CDF. Returns: float value from the Pmf """ target = random.random() total = 0 for x, p in self.d.items(): total += p if total >= target: return x # we shouldn't get here raise ValueError('Random: Pmf might not be normalized.') def Sample(self, n): """Generates a random sample from this distribution. n: int length of the sample returns: NumPy array """ return self.MakeCdf().Sample(n) def Mean(self): """Computes the mean of a PMF. Returns: float mean """ return sum(p * x for x, p in self.Items()) def Median(self): """Computes the median of a PMF. Returns: float median """ return self.MakeCdf().Percentile(50) def Var(self, mu=None): """Computes the variance of a PMF. mu: the point around which the variance is computed; if omitted, computes the mean returns: float variance """ if mu is None: mu = self.Mean() return sum(p * (x-mu)**2 for x, p in self.Items()) def Expect(self, func): """Computes the expectation of func(x). Returns: expectation """ return np.sum(p * func(x) for x, p in self.Items()) def Std(self, mu=None): """Computes the standard deviation of a PMF. 
mu: the point around which the variance is computed; if omitted, computes the mean returns: float standard deviation """ var = self.Var(mu) return math.sqrt(var) def Mode(self): """Returns the value with the highest probability. Returns: float probability """ _, val = max((prob, val) for val, prob in self.Items()) return val # The mode of a posterior is the maximum aposteori probability (MAP) MAP = Mode # If the distribution contains likelihoods only, the peak is the # maximum likelihood estimator. MaximumLikelihood = Mode def CredibleInterval(self, percentage=90): """Computes the central credible interval. If percentage=90, computes the 90% CI. Args: percentage: float between 0 and 100 Returns: sequence of two floats, low and high """ cdf = self.MakeCdf() return cdf.CredibleInterval(percentage) def __add__(self, other): """Computes the Pmf of the sum of values drawn from self and other. other: another Pmf or a scalar returns: new Pmf """ try: return self.AddPmf(other) except AttributeError: return self.AddConstant(other) __radd__ = __add__ def AddPmf(self, other): """Computes the Pmf of the sum of values drawn from self and other. other: another Pmf returns: new Pmf """ pmf = Pmf() for v1, p1 in self.Items(): for v2, p2 in other.Items(): pmf[v1 + v2] += p1 * p2 return pmf def AddConstant(self, other): """Computes the Pmf of the sum a constant and values from self. other: a number returns: new Pmf """ if other == 0: return self.Copy() pmf = Pmf() for v1, p1 in self.Items(): pmf.Set(v1 + other, p1) return pmf def __sub__(self, other): """Computes the Pmf of the diff of values drawn from self and other. other: another Pmf returns: new Pmf """ try: return self.SubPmf(other) except AttributeError: return self.AddConstant(-other) def SubPmf(self, other): """Computes the Pmf of the diff of values drawn from self and other. 
other: another Pmf returns: new Pmf """ pmf = Pmf() for v1, p1 in self.Items(): for v2, p2 in other.Items(): pmf.Incr(v1 - v2, p1 * p2) return pmf def __mul__(self, other): """Computes the Pmf of the product of values drawn from self and other. other: another Pmf returns: new Pmf """ try: return self.MulPmf(other) except AttributeError: return self.MulConstant(other) def MulPmf(self, other): """Computes the Pmf of the diff of values drawn from self and other. other: another Pmf returns: new Pmf """ pmf = Pmf() for v1, p1 in self.Items(): for v2, p2 in other.Items(): pmf.Incr(v1 * v2, p1 * p2) return pmf def MulConstant(self, other): """Computes the Pmf of the product of a constant and values from self. other: a number returns: new Pmf """ pmf = Pmf() for v1, p1 in self.Items(): pmf.Set(v1 * other, p1) return pmf def __div__(self, other): """Computes the Pmf of the ratio of values drawn from self and other. other: another Pmf returns: new Pmf """ try: return self.DivPmf(other) except AttributeError: return self.MulConstant(1/other) __truediv__ = __div__ def DivPmf(self, other): """Computes the Pmf of the ratio of values drawn from self and other. other: another Pmf returns: new Pmf """ pmf = Pmf() for v1, p1 in self.Items(): for v2, p2 in other.Items(): pmf.Incr(v1 / v2, p1 * p2) return pmf def Max(self, k): """Computes the CDF of the maximum of k selections from this dist. k: int returns: new Cdf """ cdf = self.MakeCdf() cdf.ps **= k return cdf class Joint(Pmf): """Represents a joint distribution. The values are sequences (usually tuples) """ def Marginal(self, i, label=None): """Gets the marginal distribution of the indicated variable. i: index of the variable we want Returns: Pmf """ pmf = Pmf(label=label) for vs, prob in self.Items(): pmf.Incr(vs[i], prob) return pmf def Conditional(self, i, j, val, label=None): """Gets the conditional distribution of the indicated variable. Distribution of vs[i], conditioned on vs[j] = val. 
i: index of the variable we want j: which variable is conditioned on val: the value the jth variable has to have Returns: Pmf """ pmf = Pmf(label=label) for vs, prob in self.Items(): if vs[j] != val: continue pmf.Incr(vs[i], prob) pmf.Normalize() return pmf def MaxLikeInterval(self, percentage=90): """Returns the maximum-likelihood credible interval. If percentage=90, computes a 90% CI containing the values with the highest likelihoods. percentage: float between 0 and 100 Returns: list of values from the suite """ interval = [] total = 0 t = [(prob, val) for val, prob in self.Items()] t.sort(reverse=True) for prob, val in t: interval.append(val) total += prob if total >= percentage / 100: break return interval def MakeJoint(pmf1, pmf2): """Joint distribution of values from pmf1 and pmf2. Assumes that the PMFs represent independent random variables. Args: pmf1: Pmf object pmf2: Pmf object Returns: Joint pmf of value pairs """ joint = Joint() for v1, p1 in pmf1.Items(): for v2, p2 in pmf2.Items(): joint.Set((v1, v2), p1 * p2) return joint def MakeHistFromList(t, label=None): """Makes a histogram from an unsorted sequence of values. Args: t: sequence of numbers label: string label for this histogram Returns: Hist object """ return Hist(t, label=label) def MakeHistFromDict(d, label=None): """Makes a histogram from a map from values to frequencies. Args: d: dictionary that maps values to frequencies label: string label for this histogram Returns: Hist object """ return Hist(d, label) def MakePmfFromList(t, label=None): """Makes a PMF from an unsorted sequence of values. Args: t: sequence of numbers label: string label for this PMF Returns: Pmf object """ return Pmf(t, label=label) def MakePmfFromDict(d, label=None): """Makes a PMF from a map from values to probabilities. 
Args: d: dictionary that maps values to probabilities label: string label for this PMF Returns: Pmf object """ return Pmf(d, label=label) def MakePmfFromItems(t, label=None): """Makes a PMF from a sequence of value-probability pairs Args: t: sequence of value-probability pairs label: string label for this PMF Returns: Pmf object """ return Pmf(dict(t), label=label) def MakePmfFromHist(hist, label=None): """Makes a normalized PMF from a Hist object. Args: hist: Hist object label: string label Returns: Pmf object """ if label is None: label = hist.label return Pmf(hist, label=label) def MakeMixture(metapmf, label='mix'): """Make a mixture distribution. Args: metapmf: Pmf that maps from Pmfs to probs. label: string label for the new Pmf. Returns: Pmf object. """ mix = Pmf(label=label) for pmf, p1 in metapmf.Items(): for x, p2 in pmf.Items(): mix[x] += p1 * p2 return mix def MakeUniformPmf(low, high, n): """Make a uniform Pmf. low: lowest value (inclusive) high: highest value (inclusize) n: number of values """ pmf = Pmf() for x in np.linspace(low, high, n): pmf.Set(x, 1) pmf.Normalize() return pmf class Cdf: """Represents a cumulative distribution function. Attributes: xs: sequence of values ps: sequence of probabilities label: string used as a graph label. """ def __init__(self, obj=None, ps=None, label=None): """Initializes. If ps is provided, obj must be the corresponding list of values. 
obj: Hist, Pmf, Cdf, Pdf, dict, pandas Series, list of pairs ps: list of cumulative probabilities label: string label """ self.label = label if label is not None else DEFAULT_LABEL if isinstance(obj, (_DictWrapper, Cdf, Pdf)): if not label: self.label = label if label is not None else obj.label if obj is None: # caller does not provide obj, make an empty Cdf self.xs = np.asarray([]) self.ps = np.asarray([]) if ps is not None: logging.warning("Cdf: can't pass ps without also passing xs.") return else: # if the caller provides xs and ps, just store them if ps is not None: if isinstance(ps, str): logging.warning("Cdf: ps can't be a string") self.xs = np.asarray(obj) self.ps = np.asarray(ps) return # caller has provided just obj, not ps if isinstance(obj, Cdf): self.xs = copy.copy(obj.xs) self.ps = copy.copy(obj.ps) return if isinstance(obj, _DictWrapper): dw = obj else: dw = Hist(obj) if len(dw) == 0: self.xs = np.asarray([]) self.ps = np.asarray([]) return xs, freqs = zip(*sorted(dw.Items())) self.xs = np.asarray(xs) self.ps = np.cumsum(freqs, dtype=np.float) self.ps /= self.ps[-1] def __str__(self): cls = self.__class__.__name__ if self.label == DEFAULT_LABEL: return '%s(%s, %s)' % (cls, str(self.xs), str(self.ps)) else: return self.label def __repr__(self): cls = self.__class__.__name__ if self.label == DEFAULT_LABEL: return '%s(%s, %s)' % (cls, str(self.xs), str(self.ps)) else: return '%s(%s, %s, %s)' % (cls, str(self.xs), str(self.ps), repr(self.label)) def __len__(self): return len(self.xs) def __getitem__(self, x): return self.Prob(x) def __setitem__(self): raise UnimplementedMethodException() def __delitem__(self): raise UnimplementedMethodException() def __eq__(self, other): return np.all(self.xs == other.xs) and np.all(self.ps == other.ps) def Print(self): """Prints the values and freqs/probs in ascending order.""" for val, prob in zip(self.xs, self.ps): print(val, prob) def Copy(self, label=None): """Returns a copy of this Cdf. 
label: string label for the new Cdf """ if label is None: label = self.label return Cdf(list(self.xs), list(self.ps), label=label) def MakePmf(self, label=None): """Makes a Pmf.""" if label is None: label = self.label return Pmf(self, label=label) def Items(self): """Returns a sorted sequence of (value, probability) pairs. Note: in Python3, returns an iterator. """ a = self.ps b = np.roll(a, 1) b[0] = 0 return zip(self.xs, a-b) def Shift(self, term): """Adds a term to the xs. term: how much to add """ new = self.Copy() # don't use +=, or else an int array + float yields int array new.xs = new.xs + term return new def Scale(self, factor): """Multiplies the xs by a factor. factor: what to multiply by """ new = self.Copy() # don't use *=, or else an int array * float yields int array new.xs = new.xs * factor return new def Prob(self, x): """Returns CDF(x), the probability that corresponds to value x. Args: x: number Returns: float probability """ if x < self.xs[0]: return 0 index = bisect.bisect(self.xs, x) p = self.ps[index-1] return p def Probs(self, xs): """Gets probabilities for a sequence of values. xs: any sequence that can be converted to NumPy array returns: NumPy array of cumulative probabilities """ xs = np.asarray(xs) index = np.searchsorted(self.xs, xs, side='right') ps = self.ps[index-1] ps[xs < self.xs[0]] = 0 return ps ProbArray = Probs def Value(self, p): """Returns InverseCDF(p), the value that corresponds to probability p. Args: p: number in the range [0, 1] Returns: number value """ if p < 0 or p > 1: raise ValueError('Probability p must be in range [0, 1]') index = bisect.bisect_left(self.ps, p) return self.xs[index] def Values(self, ps=None): """Returns InverseCDF(p), the value that corresponds to probability p. If ps is not provided, returns all values. 
Args: ps: NumPy array of numbers in the range [0, 1] Returns: NumPy array of values """ if ps is None: return self.xs ps = np.asarray(ps) if np.any(ps < 0) or np.any(ps > 1): raise ValueError('Probability p must be in range [0, 1]') index = np.searchsorted(self.ps, ps, side='left') return self.xs[index] ValueArray = Values def Percentile(self, p): """Returns the value that corresponds to percentile p. Args: p: number in the range [0, 100] Returns: number value """ return self.Value(p / 100) def Percentiles(self, ps): """Returns the value that corresponds to percentiles ps. Args: ps: numbers in the range [0, 100] Returns: array of values """ ps = np.asarray(ps) return self.Values(ps / 100) def PercentileRank(self, x): """Returns the percentile rank of the value x. x: potential value in the CDF returns: percentile rank in the range 0 to 100 """ return self.Prob(x) * 100 def PercentileRanks(self, xs): """Returns the percentile ranks of the values in xs. xs: potential value in the CDF returns: array of percentile ranks in the range 0 to 100 """ return self.Probs(x) * 100 def Random(self): """Chooses a random value from this distribution.""" return self.Value(random.random()) def Sample(self, n): """Generates a random sample from this distribution. n: int length of the sample returns: NumPy array """ ps = np.random.random(n) return self.ValueArray(ps) def Mean(self): """Computes the mean of a CDF. Returns: float mean """ old_p = 0 total = 0 for x, new_p in zip(self.xs, self.ps): p = new_p - old_p total += p * x old_p = new_p return total def CredibleInterval(self, percentage=90): """Computes the central credible interval. If percentage=90, computes the 90% CI. 
Args: percentage: float between 0 and 100 Returns: sequence of two floats, low and high """ prob = (1 - percentage / 100) / 2 interval = self.Value(prob), self.Value(1 - prob) return interval ConfidenceInterval = CredibleInterval def _Round(self, multiplier=1000): """ An entry is added to the cdf only if the percentile differs from the previous value in a significant digit, where the number of significant digits is determined by multiplier. The default is 1000, which keeps log10(1000) = 3 significant digits. """ # TODO(write this method) raise UnimplementedMethodException() def Render(self, **options): """Generates a sequence of points suitable for plotting. An empirical CDF is a step function; linear interpolation can be misleading. Note: options are ignored Returns: tuple of (xs, ps) """ def interleave(a, b): c = np.empty(a.shape[0] + b.shape[0]) c[::2] = a c[1::2] = b return c a = np.array(self.xs) xs = interleave(a, a) shift_ps = np.roll(self.ps, 1) shift_ps[0] = 0 ps = interleave(shift_ps, self.ps) return xs, ps def Max(self, k): """Computes the CDF of the maximum of k selections from this dist. k: int returns: new Cdf """ cdf = self.Copy() cdf.ps **= k return cdf def MakeCdfFromItems(items, label=None): """Makes a cdf from an unsorted sequence of (value, frequency) pairs. Args: items: unsorted sequence of (value, frequency) pairs label: string label for this CDF Returns: cdf: list of (value, fraction) pairs """ return Cdf(dict(items), label=label) def MakeCdfFromDict(d, label=None): """Makes a CDF from a dictionary that maps values to frequencies. Args: d: dictionary that maps values to frequencies. label: string label for the data. Returns: Cdf object """ return Cdf(d, label=label) def MakeCdfFromList(seq, label=None): """Creates a CDF from an unsorted sequence. 
Args: seq: unsorted sequence of sortable values label: string label for the cdf Returns: Cdf object """ return Cdf(seq, label=label) def MakeCdfFromHist(hist, label=None): """Makes a CDF from a Hist object. Args: hist: Pmf.Hist object label: string label for the data. Returns: Cdf object """ if label is None: label = hist.label return Cdf(hist, label=label) def MakeCdfFromPmf(pmf, label=None): """Makes a CDF from a Pmf object. Args: pmf: Pmf.Pmf object label: string label for the data. Returns: Cdf object """ if label is None: label = pmf.label return Cdf(pmf, label=label) class UnimplementedMethodException(Exception): """Exception if someone calls a method that should be overridden.""" class Suite(Pmf): """Represents a suite of hypotheses and their probabilities.""" def Update(self, data): """Updates each hypothesis based on the data. data: any representation of the data returns: the normalizing constant """ for hypo in self.Values(): like = self.Likelihood(data, hypo) self.Mult(hypo, like) return self.Normalize() def LogUpdate(self, data): """Updates a suite of hypotheses based on new data. Modifies the suite directly; if you want to keep the original, make a copy. Note: unlike Update, LogUpdate does not normalize. Args: data: any representation of the data """ for hypo in self.Values(): like = self.LogLikelihood(data, hypo) self.Incr(hypo, like) def UpdateSet(self, dataset): """Updates each hypothesis based on the dataset. This is more efficient than calling Update repeatedly because it waits until the end to Normalize. Modifies the suite directly; if you want to keep the original, make a copy. dataset: a sequence of data returns: the normalizing constant """ for data in dataset: for hypo in self.Values(): like = self.Likelihood(data, hypo) self.Mult(hypo, like) return self.Normalize() def LogUpdateSet(self, dataset): """Updates each hypothesis based on the dataset. Modifies the suite directly; if you want to keep the original, make a copy. 
dataset: a sequence of data returns: None """ for data in dataset: self.LogUpdate(data) def Likelihood(self, data, hypo): """Computes the likelihood of the data under the hypothesis. hypo: some representation of the hypothesis data: some representation of the data """ raise UnimplementedMethodException() def LogLikelihood(self, data, hypo): """Computes the log likelihood of the data under the hypothesis. hypo: some representation of the hypothesis data: some representation of the data """ raise UnimplementedMethodException() def Print(self): """Prints the hypotheses and their probabilities.""" for hypo, prob in sorted(self.Items()): print(hypo, prob) def MakeOdds(self): """Transforms from probabilities to odds. Values with prob=0 are removed. """ for hypo, prob in self.Items(): if prob: self.Set(hypo, Odds(prob)) else: self.Remove(hypo) def MakeProbs(self): """Transforms from odds to probabilities.""" for hypo, odds in self.Items(): self.Set(hypo, Probability(odds)) def MakeSuiteFromList(t, label=None): """Makes a suite from an unsorted sequence of values. Args: t: sequence of numbers label: string label for this suite Returns: Suite object """ hist = MakeHistFromList(t, label=label) d = hist.GetDict() return MakeSuiteFromDict(d) def MakeSuiteFromHist(hist, label=None): """Makes a normalized suite from a Hist object. Args: hist: Hist object label: string label Returns: Suite object """ if label is None: label = hist.label # make a copy of the dictionary d = dict(hist.GetDict()) return MakeSuiteFromDict(d, label) def MakeSuiteFromDict(d, label=None): """Makes a suite from a map from values to probabilities. Args: d: dictionary that maps values to probabilities label: string label for this suite Returns: Suite object """ suite = Suite(label=label) suite.SetDict(d) suite.Normalize() return suite class Pdf(object): """Represents a probability density function (PDF).""" def Density(self, x): """Evaluates this Pdf at x. 
Returns: float or NumPy array of probability density """ raise UnimplementedMethodException() def GetLinspace(self): """Get a linspace for plotting. Not all subclasses of Pdf implement this. Returns: numpy array """ raise UnimplementedMethodException() def MakePmf(self, **options): """Makes a discrete version of this Pdf. options can include label: string low: low end of range high: high end of range n: number of places to evaluate Returns: new Pmf """ label = options.pop('label', '') xs, ds = self.Render(**options) return Pmf(dict(zip(xs, ds)), label=label) def Render(self, **options): """Generates a sequence of points suitable for plotting. If options includes low and high, it must also include n; in that case the density is evaluated an n locations between low and high, including both. If options includes xs, the density is evaluate at those location. Otherwise, self.GetLinspace is invoked to provide the locations. Returns: tuple of (xs, densities) """ low, high = options.pop('low', None), options.pop('high', None) if low is not None and high is not None: n = options.pop('n', 101) xs = np.linspace(low, high, n) else: xs = options.pop('xs', None) if xs is None: xs = self.GetLinspace() ds = self.Density(xs) return xs, ds def Items(self): """Generates a sequence of (value, probability) pairs. """ return zip(*self.Render()) class NormalPdf(Pdf): """Represents the PDF of a Normal distribution.""" def __init__(self, mu=0, sigma=1, label=None): """Constructs a Normal Pdf with given mu and sigma. mu: mean sigma: standard deviation label: string """ self.mu = mu self.sigma = sigma self.label = label if label is not None else '_nolegend_' def __str__(self): return 'NormalPdf(%f, %f)' % (self.mu, self.sigma) def GetLinspace(self): """Get a linspace for plotting. Returns: numpy array """ low, high = self.mu-3*self.sigma, self.mu+3*self.sigma return np.linspace(low, high, 101) def Density(self, xs): """Evaluates this Pdf at xs. 
xs: scalar or sequence of floats returns: float or NumPy array of probability density """ return stats.norm.pdf(xs, self.mu, self.sigma) class ExponentialPdf(Pdf): """Represents the PDF of an exponential distribution.""" def __init__(self, lam=1, label=None): """Constructs an exponential Pdf with given parameter. lam: rate parameter label: string """ self.lam = lam self.label = label if label is not None else '_nolegend_' def __str__(self): return 'ExponentialPdf(%f)' % (self.lam) def GetLinspace(self): """Get a linspace for plotting. Returns: numpy array """ low, high = 0, 5.0/self.lam return np.linspace(low, high, 101) def Density(self, xs): """Evaluates this Pdf at xs. xs: scalar or sequence of floats returns: float or NumPy array of probability density """ return stats.expon.pdf(xs, scale=1.0/self.lam) class EstimatedPdf(Pdf): """Represents a PDF estimated by KDE.""" def __init__(self, sample, label=None): """Estimates the density function based on a sample. sample: sequence of data label: string """ self.label = label if label is not None else '_nolegend_' self.kde = stats.gaussian_kde(sample) low = min(sample) high = max(sample) self.linspace = np.linspace(low, high, 101) def __str__(self): return 'EstimatedPdf(label=%s)' % str(self.label) def GetLinspace(self): """Get a linspace for plotting. Returns: numpy array """ return self.linspace def Density(self, xs): """Evaluates this Pdf at xs. returns: float or NumPy array of probability density """ return self.kde.evaluate(xs) def Sample(self, n): """Generates a random sample from the estimated Pdf. n: size of sample """ # NOTE: we have to flatten because resample returns a 2-D # array for some reason. return self.kde.resample(n).flatten() def CredibleInterval(pmf, percentage=90): """Computes a credible interval for a given distribution. If percentage=90, computes the 90% CI. 
Args: pmf: Pmf object representing a posterior distribution percentage: float between 0 and 100 Returns: sequence of two floats, low and high """ cdf = pmf.MakeCdf() prob = (1 - percentage / 100) / 2 interval = cdf.Value(prob), cdf.Value(1 - prob) return interval def PmfProbLess(pmf1, pmf2): """Probability that a value from pmf1 is less than a value from pmf2. Args: pmf1: Pmf object pmf2: Pmf object Returns: float probability """ total = 0 for v1, p1 in pmf1.Items(): for v2, p2 in pmf2.Items(): if v1 < v2: total += p1 * p2 return total def PmfProbGreater(pmf1, pmf2): """Probability that a value from pmf1 is less than a value from pmf2. Args: pmf1: Pmf object pmf2: Pmf object Returns: float probability """ total = 0 for v1, p1 in pmf1.Items(): for v2, p2 in pmf2.Items(): if v1 > v2: total += p1 * p2 return total def PmfProbEqual(pmf1, pmf2): """Probability that a value from pmf1 equals a value from pmf2. Args: pmf1: Pmf object pmf2: Pmf object Returns: float probability """ total = 0 for v1, p1 in pmf1.Items(): for v2, p2 in pmf2.Items(): if v1 == v2: total += p1 * p2 return total def RandomSum(dists): """Chooses a random value from each dist and returns the sum. dists: sequence of Pmf or Cdf objects returns: numerical sum """ total = sum(dist.Random() for dist in dists) return total def SampleSum(dists, n): """Draws a sample of sums from a list of distributions. dists: sequence of Pmf or Cdf objects n: sample size returns: new Pmf of sums """ pmf = Pmf(RandomSum(dists) for i in range(n)) return pmf def EvalNormalPdf(x, mu, sigma): """Computes the unnormalized PDF of the normal distribution. x: value mu: mean sigma: standard deviation returns: float probability density """ return stats.norm.pdf(x, mu, sigma) def MakeNormalPmf(mu, sigma, num_sigmas, n=201): """Makes a PMF discrete approx to a Normal distribution. 
mu: float mean sigma: float standard deviation num_sigmas: how many sigmas to extend in each direction n: number of values in the Pmf returns: normalized Pmf """ pmf = Pmf() low = mu - num_sigmas * sigma high = mu + num_sigmas * sigma for x in np.linspace(low, high, n): p = EvalNormalPdf(x, mu, sigma) pmf.Set(x, p) pmf.Normalize() return pmf def EvalBinomialPmf(k, n, p): """Evaluates the binomial PMF. Returns the probabily of k successes in n trials with probability p. """ return stats.binom.pmf(k, n, p) def MakeBinomialPmf(n, p): """Evaluates the binomial PMF. Returns the distribution of successes in n trials with probability p. """ pmf = Pmf() for k in range(n+1): pmf[k] = stats.binom.pmf(k, n, p) return pmf def EvalGammaPdf(x, a): """Computes the Gamma PDF. x: where to evaluate the PDF a: parameter of the gamma distribution returns: float probability """ return x**(a-1) * np.exp(-x) / gamma(a) def MakeGammaPmf(xs, a): """Makes a PMF discrete approx to a Gamma distribution. lam: parameter lambda in events per unit time xs: upper bound of the Pmf returns: normalized Pmf """ xs = np.asarray(xs) ps = EvalGammaPdf(xs, a) pmf = Pmf(dict(zip(xs, ps))) pmf.Normalize() return pmf def EvalGeometricPmf(k, p, loc=0): """Evaluates the geometric PMF. With loc=0: Probability of `k` trials to get one success. With loc=-1: Probability of `k` trials before first success. k: number of trials p: probability of success on each trial """ return stats.geom.pmf(k, p, loc=loc) def MakeGeometricPmf(p, loc=0, high=10): """Evaluates the binomial PMF. With loc=0: PMF of trials to get one success. With loc=-1: PMF of trials before first success. p: probability of success high: upper bound where PMF is truncated """ pmf = Pmf() for k in range(high): pmf[k] = stats.geom.pmf(k, p, loc=loc) pmf.Normalize() return pmf def EvalHypergeomPmf(k, N, K, n): """Evaluates the hypergeometric PMF. Returns the probabily of k successes in n trials from a population N with K successes in it. 
""" return stats.hypergeom.pmf(k, N, K, n) def EvalPoissonPmf(k, lam): """Computes the Poisson PMF. k: number of events lam: parameter lambda in events per unit time returns: float probability """ return stats.poisson.pmf(k, lam) def MakePoissonPmf(lam, high, step=1): """Makes a PMF discrete approx to a Poisson distribution. lam: parameter lambda in events per unit time high: upper bound of the Pmf returns: normalized Pmf """ pmf = Pmf() for k in range(0, high + 1, step): p = stats.poisson.pmf(k, lam) pmf.Set(k, p) pmf.Normalize() return pmf def EvalExponentialPdf(x, lam): """Computes the exponential PDF. x: value lam: parameter lambda in events per unit time returns: float probability density """ return lam * math.exp(-lam * x) def EvalExponentialCdf(x, lam): """Evaluates CDF of the exponential distribution with parameter lam.""" return 1 - math.exp(-lam * x) def MakeExponentialPmf(lam, high, n=200): """Makes a PMF discrete approx to an exponential distribution. lam: parameter lambda in events per unit time high: upper bound n: number of values in the Pmf returns: normalized Pmf """ pmf = Pmf() for x in np.linspace(0, high, n): p = EvalExponentialPdf(x, lam) pmf.Set(x, p) pmf.Normalize() return pmf def EvalWeibullPdf(x, lam, k): """Computes the Weibull PDF. x: value lam: parameter lambda in events per unit time k: parameter returns: float probability density """ arg = (x / lam) return k / lam * arg**(k-1) * np.exp(-arg**k) def EvalWeibullCdf(x, lam, k): """Evaluates CDF of the Weibull distribution.""" arg = (x / lam) return 1 - np.exp(-arg**k) def MakeWeibullPmf(lam, k, high, n=200): """Makes a PMF discrete approx to a Weibull distribution. lam: parameter lambda in events per unit time k: parameter high: upper bound n: number of values in the Pmf returns: normalized Pmf """ xs = np.linspace(0, high, n) ps = EvalWeibullPdf(xs, lam, k) ps[np.isinf(ps)] = 0 return Pmf(dict(zip(xs, ps))) def EvalParetoPdf(x, xm, alpha): """Computes the Pareto. 
xm: minimum value (scale parameter) alpha: shape parameter returns: float probability density """ return stats.pareto.pdf(x, alpha, scale=xm) def MakeParetoPmf(xm, alpha, high, num=101): """Makes a PMF discrete approx to a Pareto distribution. xm: minimum value (scale parameter) alpha: shape parameter high: upper bound value num: number of values returns: normalized Pmf """ xs = np.linspace(xm, high, num) ps = stats.pareto.pdf(xs, alpha, scale=xm) pmf = Pmf(dict(zip(xs, ps))) return pmf def StandardNormalCdf(x): """Evaluates the CDF of the standard Normal distribution. See http://en.wikipedia.org/wiki/Normal_distribution #Cumulative_distribution_function Args: x: float Returns: float """ return (math.erf(x / ROOT2) + 1) / 2 def EvalNormalCdf(x, mu=0, sigma=1): """Evaluates the CDF of the normal distribution. Args: x: float mu: mean parameter sigma: standard deviation parameter Returns: float """ return stats.norm.cdf(x, loc=mu, scale=sigma) def EvalNormalCdfInverse(p, mu=0, sigma=1): """Evaluates the inverse CDF of the normal distribution. See http://en.wikipedia.org/wiki/Normal_distribution#Quantile_function Args: p: float mu: mean parameter sigma: standard deviation parameter Returns: float """ return stats.norm.ppf(p, loc=mu, scale=sigma) def EvalLognormalCdf(x, mu=0, sigma=1): """Evaluates the CDF of the lognormal distribution. x: float or sequence mu: mean parameter sigma: standard deviation parameter Returns: float or sequence """ return stats.lognorm.cdf(x, loc=mu, scale=sigma) def RenderExpoCdf(lam, low, high, n=101): """Generates sequences of xs and ps for an exponential CDF. lam: parameter low: float high: float n: number of points to render returns: numpy arrays (xs, ps) """ xs = np.linspace(low, high, n) ps = 1 - np.exp(-lam * xs) #ps = stats.expon.cdf(xs, scale=1.0/lam) return xs, ps def RenderNormalCdf(mu, sigma, low, high, n=101): """Generates sequences of xs and ps for a Normal CDF. 
mu: parameter sigma: parameter low: float high: float n: number of points to render returns: numpy arrays (xs, ps) """ xs = np.linspace(low, high, n) ps = stats.norm.cdf(xs, mu, sigma) return xs, ps def RenderParetoCdf(xmin, alpha, low, high, n=50): """Generates sequences of xs and ps for a Pareto CDF. xmin: parameter alpha: parameter low: float high: float n: number of points to render returns: numpy arrays (xs, ps) """ if low < xmin: low = xmin xs = np.linspace(low, high, n) ps = 1 - (xs / xmin) ** -alpha #ps = stats.pareto.cdf(xs, scale=xmin, b=alpha) return xs, ps class Beta: """Represents a Beta distribution. See http://en.wikipedia.org/wiki/Beta_distribution """ def __init__(self, alpha=1, beta=1, label=None): """Initializes a Beta distribution.""" self.alpha = alpha self.beta = beta self.label = label if label is not None else '_nolegend_' def Update(self, data): """Updates a Beta distribution. data: pair of int (heads, tails) """ heads, tails = data self.alpha += heads self.beta += tails def Mean(self): """Computes the mean of this distribution.""" return self.alpha / (self.alpha + self.beta) def MAP(self): """Computes the value with maximum a posteori probability.""" a = self.alpha - 1 b = self.beta - 1 return a / (a + b) def Random(self): """Generates a random variate from this distribution.""" return random.betavariate(self.alpha, self.beta) def Sample(self, n): """Generates a random sample from this distribution. n: int sample size """ size = n, return np.random.beta(self.alpha, self.beta, size) def EvalPdf(self, x): """Evaluates the PDF at x.""" return x ** (self.alpha - 1) * (1 - x) ** (self.beta - 1) def MakePmf(self, steps=101, label=None): """Returns a Pmf of this distribution. Note: Normally, we just evaluate the PDF at a sequence of points and treat the probability density as a probability mass. But if alpha or beta is less than one, we have to be more careful because the PDF goes to infinity at x=0 and x=1. 
In that case we evaluate the CDF and compute differences. The result is a little funny, because the values at 0 and 1 are not symmetric. Nevertheless, it is a reasonable discrete model of the continuous distribution, and behaves well as the number of values increases. """ if label is None and self.label is not None: label = self.label if self.alpha < 1 or self.beta < 1: cdf = self.MakeCdf() pmf = cdf.MakePmf() return pmf xs = [i / (steps - 1.0) for i in range(steps)] probs = [self.EvalPdf(x) for x in xs] pmf = Pmf(dict(zip(xs, probs)), label=label) return pmf def MakeCdf(self, steps=101): """Returns the CDF of this distribution.""" xs = [i / (steps - 1.0) for i in range(steps)] ps = special.betainc(self.alpha, self.beta, xs) cdf = Cdf(xs, ps) return cdf def Percentile(self, ps): """Returns the given percentiles from this distribution. ps: scalar, array, or list of [0-100] """ ps = np.asarray(ps) / 100 xs = special.betaincinv(self.alpha, self.beta, ps) return xs class Dirichlet(object): """Represents a Dirichlet distribution. See http://en.wikipedia.org/wiki/Dirichlet_distribution """ def __init__(self, n, conc=1, label=None): """Initializes a Dirichlet distribution. n: number of dimensions conc: concentration parameter (smaller yields more concentration) label: string label """ if n < 2: raise ValueError('A Dirichlet distribution with ' 'n<2 makes no sense') self.n = n self.params = np.ones(n, dtype=np.float) * conc self.label = label if label is not None else '_nolegend_' def Update(self, data): """Updates a Dirichlet distribution. data: sequence of observations, in order corresponding to params """ m = len(data) self.params[:m] += data def Random(self): """Generates a random variate from this distribution. Returns: normalized vector of fractions """ p = np.random.gamma(self.params) return p / p.sum() def Likelihood(self, data): """Computes the likelihood of the data. Selects a random vector of probabilities from this distribution. 
Returns: float probability """ m = len(data) if self.n < m: return 0 x = data p = self.Random() q = p[:m] ** x return q.prod() def LogLikelihood(self, data): """Computes the log likelihood of the data. Selects a random vector of probabilities from this distribution. Returns: float log probability """ m = len(data) if self.n < m: return float('-inf') x = self.Random() y = np.log(x[:m]) * data return y.sum() def MarginalBeta(self, i): """Computes the marginal distribution of the ith element. See http://en.wikipedia.org/wiki/Dirichlet_distribution #Marginal_distributions i: int Returns: Beta object """ alpha0 = self.params.sum() alpha = self.params[i] return Beta(alpha, alpha0 - alpha) def PredictivePmf(self, xs, label=None): """Makes a predictive distribution. xs: values to go into the Pmf Returns: Pmf that maps from x to the mean prevalence of x """ alpha0 = self.params.sum() ps = self.params / alpha0 return Pmf(zip(xs, ps), label=label) def BinomialCoef(n, k): """Compute the binomial coefficient "n choose k". n: number of trials k: number of successes Returns: float """ return scipy.misc.comb(n, k) def LogBinomialCoef(n, k): """Computes the log of the binomial coefficient. http://math.stackexchange.com/questions/64716/ approximating-the-logarithm-of-the-binomial-coefficient n: number of trials k: number of successes Returns: float """ return n * math.log(n) - k * math.log(k) - (n - k) * math.log(n - k) def NormalProbability(ys, jitter=0): """Generates data for a normal probability plot. ys: sequence of values jitter: float magnitude of jitter added to the ys returns: numpy arrays xs, ys """ n = len(ys) xs = np.random.normal(0, 1, n) xs.sort() if jitter: ys = Jitter(ys, jitter) else: ys = np.array(ys) ys.sort() return xs, ys def Jitter(values, jitter=0.5): """Jitters the values by adding a uniform variate in (-jitter, jitter). 
values: sequence jitter: scalar magnitude of jitter returns: new numpy array """ n = len(values) return np.random.normal(0, jitter, n) + values def NormalProbabilityPlot(sample, fit_color='0.8', **options): """Makes a normal probability plot with a fitted line. sample: sequence of numbers fit_color: color string for the fitted line options: passed along to Plot """ xs, ys = NormalProbability(sample) mean, var = MeanVar(sample) std = math.sqrt(var) fit = FitLine(xs, mean, std) thinkplot.Plot(*fit, color=fit_color, label='model') xs, ys = NormalProbability(sample) thinkplot.Plot(xs, ys, **options) def Mean(xs): """Computes mean. xs: sequence of values returns: float mean """ return np.mean(xs) def Var(xs, mu=None, ddof=0): """Computes variance. xs: sequence of values mu: option known mean ddof: delta degrees of freedom returns: float """ xs = np.asarray(xs) if mu is None: mu = xs.mean() ds = xs - mu return np.dot(ds, ds) / (len(xs) - ddof) def Std(xs, mu=None, ddof=0): """Computes standard deviation. xs: sequence of values mu: option known mean ddof: delta degrees of freedom returns: float """ var = Var(xs, mu, ddof) return math.sqrt(var) def MeanVar(xs, ddof=0): """Computes mean and variance. Based on http://stackoverflow.com/questions/19391149/ numpy-mean-and-variance-from-single-function xs: sequence of values ddof: delta degrees of freedom returns: pair of float, mean and var """ xs = np.asarray(xs) mean = xs.mean() s2 = Var(xs, mean, ddof) return mean, s2 def Trim(t, p=0.01): """Trims the largest and smallest elements of t. Args: t: sequence of numbers p: fraction of values to trim off each end Returns: sequence of values """ n = int(p * len(t)) t = sorted(t)[n:-n] return t def TrimmedMean(t, p=0.01): """Computes the trimmed mean of a sequence of numbers. 
Args: t: sequence of numbers p: fraction of values to trim off each end Returns: float """ t = Trim(t, p) return Mean(t) def TrimmedMeanVar(t, p=0.01): """Computes the trimmed mean and variance of a sequence of numbers. Side effect: sorts the list. Args: t: sequence of numbers p: fraction of values to trim off each end Returns: float """ t = Trim(t, p) mu, var = MeanVar(t) return mu, var def CohenEffectSize(group1, group2): """Compute Cohen's d. group1: Series or NumPy array group2: Series or NumPy array returns: float """ diff = group1.mean() - group2.mean() n1, n2 = len(group1), len(group2) var1 = group1.var() var2 = group2.var() pooled_var = (n1 * var1 + n2 * var2) / (n1 + n2) d = diff / math.sqrt(pooled_var) return d def Cov(xs, ys, meanx=None, meany=None): """Computes Cov(X, Y). Args: xs: sequence of values ys: sequence of values meanx: optional float mean of xs meany: optional float mean of ys Returns: Cov(X, Y) """ xs = np.asarray(xs) ys = np.asarray(ys) if meanx is None: meanx = np.mean(xs) if meany is None: meany = np.mean(ys) cov = np.dot(xs-meanx, ys-meany) / len(xs) return cov def Corr(xs, ys): """Computes Corr(X, Y). Args: xs: sequence of values ys: sequence of values Returns: Corr(X, Y) """ xs = np.asarray(xs) ys = np.asarray(ys) meanx, varx = MeanVar(xs) meany, vary = MeanVar(ys) corr = Cov(xs, ys, meanx, meany) / math.sqrt(varx * vary) return corr def SerialCorr(series, lag=1): """Computes the serial correlation of a series. series: Series lag: integer number of intervals to shift returns: float correlation """ xs = series[lag:] ys = series.shift(lag)[lag:] corr = Corr(xs, ys) return corr def SpearmanCorr(xs, ys): """Computes Spearman's rank correlation. Args: xs: sequence of values ys: sequence of values Returns: float Spearman's correlation """ xranks = pandas.Series(xs).rank() yranks = pandas.Series(ys).rank() return Corr(xranks, yranks) def MapToRanks(t): """Returns a list of ranks corresponding to the elements in t. 
Args: t: sequence of numbers Returns: list of integer ranks, starting at 1 """ # pair up each value with its index pairs = enumerate(t) # sort by value sorted_pairs = sorted(pairs, key=itemgetter(1)) # pair up each pair with its rank ranked = enumerate(sorted_pairs) # sort by index resorted = sorted(ranked, key=lambda trip: trip[1][0]) # extract the ranks ranks = [trip[0]+1 for trip in resorted] return ranks def LeastSquares(xs, ys): """Computes a linear least squares fit for ys as a function of xs. Args: xs: sequence of values ys: sequence of values Returns: tuple of (intercept, slope) """ meanx, varx = MeanVar(xs) meany = Mean(ys) slope = Cov(xs, ys, meanx, meany) / varx inter = meany - slope * meanx return inter, slope def FitLine(xs, inter, slope): """Fits a line to the given data. xs: sequence of x returns: tuple of numpy arrays (sorted xs, fit ys) """ fit_xs = np.sort(xs) fit_ys = inter + slope * fit_xs return fit_xs, fit_ys def Residuals(xs, ys, inter, slope): """Computes residuals for a linear fit with parameters inter and slope. Args: xs: independent variable ys: dependent variable inter: float intercept slope: float slope Returns: list of residuals """ xs = np.asarray(xs) ys = np.asarray(ys) res = ys - (inter + slope * xs) return res def CoefDetermination(ys, res): """Computes the coefficient of determination (R^2) for given residuals. Args: ys: dependent variable res: residuals Returns: float coefficient of determination """ return 1 - Var(res) / Var(ys) def CorrelatedGenerator(rho): """Generates standard normal variates with serial correlation. rho: target coefficient of correlation Returns: iterable """ x = random.gauss(0, 1) yield x sigma = math.sqrt(1 - rho**2) while True: x = random.gauss(x * rho, sigma) yield x def CorrelatedNormalGenerator(mu, sigma, rho): """Generates normal variates with serial correlation. 
mu: mean of variate sigma: standard deviation of variate rho: target coefficient of correlation Returns: iterable """ for x in CorrelatedGenerator(rho): yield x * sigma + mu def RawMoment(xs, k): """Computes the kth raw moment of xs. """ return sum(x**k for x in xs) / len(xs) def CentralMoment(xs, k): """Computes the kth central moment of xs. """ mean = RawMoment(xs, 1) return sum((x - mean)**k for x in xs) / len(xs) def StandardizedMoment(xs, k): """Computes the kth standardized moment of xs. """ var = CentralMoment(xs, 2) std = math.sqrt(var) return CentralMoment(xs, k) / std**k def Skewness(xs): """Computes skewness. """ return StandardizedMoment(xs, 3) def Median(xs): """Computes the median (50th percentile) of a sequence. xs: sequence or anything else that can initialize a Cdf returns: float """ cdf = Cdf(xs) return cdf.Value(0.5) def IQR(xs): """Computes the interquartile of a sequence. xs: sequence or anything else that can initialize a Cdf returns: pair of floats """ cdf = Cdf(xs) return cdf.Value(0.25), cdf.Value(0.75) def PearsonMedianSkewness(xs): """Computes the Pearson median skewness. """ median = Median(xs) mean = RawMoment(xs, 1) var = CentralMoment(xs, 2) std = math.sqrt(var) gp = 3 * (mean - median) / std return gp class FixedWidthVariables(object): """Represents a set of variables in a fixed width file.""" def __init__(self, variables, index_base=0): """Initializes. variables: DataFrame index_base: are the indices 0 or 1 based? Attributes: colspecs: list of (start, end) index tuples names: list of string variable names """ self.variables = variables # note: by default, subtract 1 from colspecs self.colspecs = variables[['start', 'end']] - index_base # convert colspecs to a list of pair of int self.colspecs = self.colspecs.astype(np.int).values.tolist() self.names = variables['name'] def ReadFixedWidth(self, filename, **options): """Reads a fixed width ASCII file. 
filename: string filename returns: DataFrame """ df = pandas.read_fwf(filename, colspecs=self.colspecs, names=self.names, **options) return df def ReadStataDct(dct_file, **options): """Reads a Stata dictionary file. dct_file: string filename options: dict of options passed to open() returns: FixedWidthVariables object """ type_map = dict(byte=int, int=int, long=int, float=float, double=float, numeric=float) var_info = [] with open(dct_file, **options) as f: for line in f: match = re.search( r'_column\(([^)]*)\)', line) if not match: continue start = int(match.group(1)) t = line.split() vtype, name, fstring = t[1:4] name = name.lower() if vtype.startswith('str'): vtype = str else: vtype = type_map[vtype] long_desc = ' '.join(t[4:]).strip('"') var_info.append((start, vtype, name, fstring, long_desc)) columns = ['start', 'type', 'name', 'fstring', 'desc'] variables = pandas.DataFrame(var_info, columns=columns) # fill in the end column by shifting the start column variables['end'] = variables.start.shift(-1) variables.loc[len(variables)-1, 'end'] = 0 dct = FixedWidthVariables(variables, index_base=1) return dct def Resample(xs, n=None): """Draw a sample from xs with the same length as xs. xs: sequence n: sample size (default: len(xs)) returns: NumPy array """ if n is None: n = len(xs) return np.random.choice(xs, n, replace=True) def SampleRows(df, nrows, replace=False): """Choose a sample of rows from a DataFrame. df: DataFrame nrows: number of rows replace: whether to sample with replacement returns: DataDf """ indices = np.random.choice(df.index, nrows, replace=replace) sample = df.loc[indices] return sample def ResampleRows(df): """Resamples rows from a DataFrame. df: DataFrame returns: DataFrame """ return SampleRows(df, len(df), replace=True) def ResampleRowsWeighted(df, column='finalwgt'): """Resamples a DataFrame using probabilities proportional to given column. 
df: DataFrame column: string column name to use as weights returns: DataFrame """ weights = df[column].copy() weights /= sum(weights) indices = np.random.choice(df.index, len(df), replace=True, p=weights) sample = df.loc[indices] return sample def PercentileRow(array, p): """Selects the row from a sorted array that maps to percentile p. p: float 0--100 returns: NumPy array (one row) """ rows, cols = array.shape index = int(rows * p / 100) return array[index,] def PercentileRows(ys_seq, percents): """Given a collection of lines, selects percentiles along vertical axis. For example, if ys_seq contains simulation results like ys as a function of time, and percents contains (5, 95), the result would be a 90% CI for each vertical slice of the simulation results. ys_seq: sequence of lines (y values) percents: list of percentiles (0-100) to select returns: list of NumPy arrays, one for each percentile """ nrows = len(ys_seq) ncols = len(ys_seq[0]) array = np.zeros((nrows, ncols)) for i, ys in enumerate(ys_seq): array[i,] = ys array = np.sort(array, axis=0) rows = [PercentileRow(array, p) for p in percents] return rows def Smooth(xs, sigma=2, **options): """Smooths a NumPy array with a Gaussian filter. xs: sequence sigma: standard deviation of the filter """ return ndimage.filters.gaussian_filter1d(xs, sigma, **options) class HypothesisTest(object): """Represents a hypothesis test.""" def __init__(self, data): """Initializes. data: data in whatever form is relevant """ self.data = data self.MakeModel() self.actual = self.TestStatistic(data) self.test_stats = None self.test_cdf = None def PValue(self, iters=1000): """Computes the distribution of the test statistic and p-value. 
iters: number of iterations returns: float p-value """ self.test_stats = [self.TestStatistic(self.RunModel()) for _ in range(iters)] self.test_cdf = Cdf(self.test_stats) count = sum(1 for x in self.test_stats if x >= self.actual) return count / iters def MaxTestStat(self): """Returns the largest test statistic seen during simulations. """ return max(self.test_stats) def PlotCdf(self, label=None): """Draws a Cdf with vertical lines at the observed test stat. """ def VertLine(x): """Draws a vertical line at x.""" thinkplot.Plot([x, x], [0, 1], color='0.8') VertLine(self.actual) thinkplot.Cdf(self.test_cdf, label=label) def TestStatistic(self, data): """Computes the test statistic. data: data in whatever form is relevant """ raise UnimplementedMethodException() def MakeModel(self): """Build a model of the null hypothesis. """ pass def RunModel(self): """Run the model of the null hypothesis. returns: simulated data """ raise UnimplementedMethodException() def main(): pass if __name__ == '__main__': main()
AllenDowney/MarriageNSFG
thinkstats2.py
Python
mit
75,264
import { Component } from '@angular/core'; import { NavController } from 'ionic-angular'; /* Generated class for the GridTestPage page. See http://ionicframework.com/docs/v2/components/#navigation for more info on Ionic pages and navigation. */ @Component({ templateUrl: 'build/pages/grid-test/grid-test.html', }) export class GridTestPage { constructor(private nav: NavController) {} }
fabioindaiatuba/curso-ionic2-udemy
app/pages/grid-test/grid-test.ts
TypeScript
mit
399
tr:nth-child(odd) td:nth-child(odd){ background : black; } tr:nth-child(even) td:nth-child(even){ background : black; } tr:nth-child(2) p::before { content : "P"; } tr:nth-child(7) p::before { content : "P"; }
c-square/python-lab
web/solutii/vrabie_victor/sah/ceva.css
CSS
mit
218
require "active_support/core_ext/module/attribute_accessors" require "rack/utils" module ActionDispatch class ExceptionWrapper cattr_accessor :rescue_responses, default: Hash.new(:internal_server_error).merge!( "ActionController::RoutingError" => :not_found, "AbstractController::ActionNotFound" => :not_found, "ActionController::MethodNotAllowed" => :method_not_allowed, "ActionController::UnknownHttpMethod" => :method_not_allowed, "ActionController::NotImplemented" => :not_implemented, "ActionController::UnknownFormat" => :not_acceptable, "ActionController::InvalidAuthenticityToken" => :unprocessable_entity, "ActionController::InvalidCrossOriginRequest" => :unprocessable_entity, "ActionDispatch::Http::Parameters::ParseError" => :bad_request, "ActionController::BadRequest" => :bad_request, "ActionController::ParameterMissing" => :bad_request, "Rack::QueryParser::ParameterTypeError" => :bad_request, "Rack::QueryParser::InvalidParameterError" => :bad_request ) cattr_accessor :rescue_templates, default: Hash.new("diagnostics").merge!( "ActionView::MissingTemplate" => "missing_template", "ActionController::RoutingError" => "routing_error", "AbstractController::ActionNotFound" => "unknown_action", "ActionView::Template::Error" => "template_error" ) attr_reader :backtrace_cleaner, :exception, :line_number, :file def initialize(backtrace_cleaner, exception) @backtrace_cleaner = backtrace_cleaner @exception = original_exception(exception) expand_backtrace if exception.is_a?(SyntaxError) || exception.cause.is_a?(SyntaxError) end def rescue_template @@rescue_templates[@exception.class.name] end def status_code self.class.status_code_for_exception(@exception.class.name) end def application_trace clean_backtrace(:silent) end def framework_trace clean_backtrace(:noise) end def full_trace clean_backtrace(:all) end def traces application_trace_with_ids = [] framework_trace_with_ids = [] full_trace_with_ids = [] full_trace.each_with_index do |trace, idx| trace_with_id = { id: idx, 
trace: trace } if application_trace.include?(trace) application_trace_with_ids << trace_with_id else framework_trace_with_ids << trace_with_id end full_trace_with_ids << trace_with_id end { "Application Trace" => application_trace_with_ids, "Framework Trace" => framework_trace_with_ids, "Full Trace" => full_trace_with_ids } end def self.status_code_for_exception(class_name) Rack::Utils.status_code(@@rescue_responses[class_name]) end def source_extracts backtrace.map do |trace| file, line_number = extract_file_and_line_number(trace) { code: source_fragment(file, line_number), line_number: line_number } end end private def backtrace Array(@exception.backtrace) end def original_exception(exception) if @@rescue_responses.has_key?(exception.cause.class.name) exception.cause else exception end end def clean_backtrace(*args) if backtrace_cleaner backtrace_cleaner.clean(backtrace, *args) else backtrace end end def source_fragment(path, line) return unless Rails.respond_to?(:root) && Rails.root full_path = Rails.root.join(path) if File.exist?(full_path) File.open(full_path, "r") do |file| start = [line - 3, 0].max lines = file.each_line.drop(start).take(6) Hash[*(start + 1..(lines.count + start)).zip(lines).flatten] end end end def extract_file_and_line_number(trace) # Split by the first colon followed by some digits, which works for both # Windows and Unix path styles. file, line = trace.match(/^(.+?):(\d+).*$/, &:captures) || trace [file, line.to_i] end def expand_backtrace @exception.backtrace.unshift( @exception.to_s.split("\n") ).flatten! end end end
maicher/rails
actionpack/lib/action_dispatch/middleware/exception_wrapper.rb
Ruby
mit
4,478
<!-- Report any requested source code --> <!-- Report the active source code --> <div class="bag-holder"> <a href="https://shop.diesel.com/cart" title="Go to My Bag" class="mini-cart-link"> <span id="mini-cart-icon-empty" class='header-mini-cart empty-black-bag'>Empty Bag</span> <span class="empty-bag-count">Bag Count</span> </a> <div class="minicart-tool-tip"> <span class="arrow"></span> <div class="msg">Be quick and Im yours</div> </div> </div> <div class="mini-bag-content container-fluid pageName" data-pagename="Mini Cart"> <div class="row"> <div class="col-lg-8 col-md-6 mini-cart-products empty-cart-wrapper"> <div class="left-text-block"> <h2 class="heading-txt">Feed me I'm hungry </h2> </div> </div> <div class="col-lg-4 col-md-6 mini-cart-totals"> <h2 class="heading-txt">SHOPPING BAG</h2> <div class="seperator"></div> <P class="item-section"> 0 Item </P> <div class="mini-cart-button clearfix"> <a class="primary-button red viewCart viewBag" href="https://shop.diesel.com/cart" title="Continue Shopping ">Continue Shopping </a> </div> </div> </div> <div class="row"> <span class="ipad_cross_icon"></span> </div> </div> <div class="rtaminicart" data-rtaCart="" data-rtaCartSku="" data-rtaCartAmounts="" data-rtaCartQuantities="" data-rtaSpecial="" data-rtaTags=""></div>
sygcom/diesel_2016
application/views/Cart_AddProduct_2016_08_04_product.php
PHP
mit
1,519
#include "Halide.h" using namespace Halide; // Compile a simple pipeline to an object and to C code. HalideExtern_2(int, an_extern_func, int, int); int main(int argc, char **argv) { Func f, g, h; ImageParam input(UInt(16), 2); Var x, y; f(x, y) = (input(clamp(x+2, 0, input.width()-1), clamp(y-2, 0, input.height()-1)) * 17)/13; h.define_extern("an_extern_stage", {f}, Int(16), 0); g(x, y) = f(y, x) + f(x, y) + cast<uint16_t>(an_extern_func(x, y)) + h(); h.compute_root(); f.compute_root(); f.debug_to_file("f.tiff"); std::vector<Argument> args; args.push_back(input); g.compile_to_header("pipeline_native.h", args, "pipeline_native"); g.compile_to_header("pipeline_c.h", args, "pipeline_c"); g.compile_to_object("pipeline_native.o", args, "pipeline_native"); g.compile_to_c("pipeline_c.c", args, "pipeline_c"); return 0; }
ayanazmat/Halide
apps/c_backend/pipeline.cpp
C++
mit
898
<?php use Api\Model\Languageforge\Lexicon\Command\LexOptionListCommands; use Api\Model\Languageforge\Lexicon\LexOptionListListModel; use PHPUnit\Framework\TestCase; class LexOptionListCommandsTest extends TestCase { public function testUpdateList_newList_createsOK() { $e = new LexiconMongoTestEnvironment(); $e->clean(); $project = $e->createProject(SF_TESTPROJECT, SF_TESTPROJECTCODE); $optionLists = new LexOptionListListModel($project); $optionLists->read(); // Initial project has no optionlists populated $this->assertEquals(0, $optionLists->count); // Initialized project has part of speech optionlist defined $project->initializeNewProject(); $optionLists->read(); $this->assertEquals(1, $optionLists->count); $initialValue = $optionLists->entries[0]['items'][0]['value']; $this->assertEquals('Adjective (adj)', $initialValue); // Swap first and last items of parts of speech list $count = count($optionLists->entries[0]['items']); $swap = $optionLists->entries[0]['items'][0]; $optionLists->entries[0]['items'][0] = $optionLists->entries[0]['items'][$count-1]; $optionLists->entries[0]['items'][$count-1] = $swap; LexOptionListCommands::updateList($project->id->asString(), $optionLists->entries[0]); $optionLists->read(); $newValue = $optionLists->entries[0]['items'][0]['value']; $this->assertEquals('Verb (v)', $newValue); // Create part of speech list for fruits $fruits = [ ['key' => 'a', 'value' => 'apple'], ['key' => 'b', 'value' => 'berry'], ['key' => 'c', 'value' => 'cherry'], ['key' => 'g', 'value' => 'grape'], ['key' => 'm', 'value' => 'mango'], ['key' => 'p', 'value' => 'pineapple'] ]; $data = [ 'id' => '', 'name' => 'List of Fruits', 'code' => 'fruits', 'items' => $fruits, 'canDelete' => false ]; LexOptionListCommands::updateList($project->id->asString(), $data); $optionLists->read(); $this->assertEquals(2, $optionLists->count); } }
sillsdev/web-scriptureforge
test/php/model/languageforge/lexicon/command/LexOptionListCommandsTest.php
PHP
mit
2,258
<?php namespace TYPO3\Fluid\Tests\Unit\Core\Parser\Interceptor; /* * This file is part of the TYPO3.Fluid package. * * (c) Contributors of the Neos Project - www.neos.io * * This package is Open Source Software. For the full copyright and license * information, please view the LICENSE file which was distributed with this * source code. */ use TYPO3\Flow\Tests\UnitTestCase; use TYPO3\Fluid\Core\Parser\Interceptor\Escape; use TYPO3\Fluid\Core\Parser\InterceptorInterface; use TYPO3\Fluid\Core\Parser\ParsingState; use TYPO3\Fluid\Core\Parser\SyntaxTree\ViewHelperNode; use TYPO3\Fluid\Core\ViewHelper\AbstractViewHelper; /** * Testcase for Interceptor\Escape */ class EscapeTest extends UnitTestCase { /** * @var Escape|\PHPUnit_Framework_MockObject_MockObject */ protected $escapeInterceptor; /** * @var AbstractViewHelper|\PHPUnit_Framework_MockObject_MockObject */ protected $mockViewHelper; /** * @var ViewHelperNode|\PHPUnit_Framework_MockObject_MockObject */ protected $mockNode; /** * @var ParsingState|\PHPUnit_Framework_MockObject_MockObject */ protected $mockParsingState; public function setUp() { $this->escapeInterceptor = $this->getAccessibleMock(\TYPO3\Fluid\Core\Parser\Interceptor\Escape::class, array('dummy')); $this->mockViewHelper = $this->getMockBuilder(\TYPO3\Fluid\Core\ViewHelper\AbstractViewHelper::class)->disableOriginalConstructor()->getMock(); $this->mockNode = $this->getMockBuilder(\TYPO3\Fluid\Core\Parser\SyntaxTree\ViewHelperNode::class)->disableOriginalConstructor()->getMock(); $this->mockParsingState = $this->getMockBuilder(\TYPO3\Fluid\Core\Parser\ParsingState::class)->disableOriginalConstructor()->getMock(); } /** * @test */ public function processDoesNotDisableEscapingInterceptorByDefault() { $interceptorPosition = InterceptorInterface::INTERCEPT_OPENING_VIEWHELPER; $this->mockViewHelper->expects($this->once())->method('isChildrenEscapingEnabled')->will($this->returnValue(true)); 
$this->mockNode->expects($this->once())->method('getUninitializedViewHelper')->will($this->returnValue($this->mockViewHelper)); $this->assertTrue($this->escapeInterceptor->_get('childrenEscapingEnabled')); $this->escapeInterceptor->process($this->mockNode, $interceptorPosition, $this->mockParsingState); $this->assertTrue($this->escapeInterceptor->_get('childrenEscapingEnabled')); } /** * @test */ public function processDisablesEscapingInterceptorIfViewHelperDisablesIt() { $interceptorPosition = InterceptorInterface::INTERCEPT_OPENING_VIEWHELPER; $this->mockViewHelper->expects($this->once())->method('isChildrenEscapingEnabled')->will($this->returnValue(false)); $this->mockNode->expects($this->once())->method('getUninitializedViewHelper')->will($this->returnValue($this->mockViewHelper)); $this->assertTrue($this->escapeInterceptor->_get('childrenEscapingEnabled')); $this->escapeInterceptor->process($this->mockNode, $interceptorPosition, $this->mockParsingState); $this->assertFalse($this->escapeInterceptor->_get('childrenEscapingEnabled')); } /** * @test */ public function processReenablesEscapingInterceptorOnClosingViewHelperTagIfItWasDisabledBefore() { $interceptorPosition = InterceptorInterface::INTERCEPT_CLOSING_VIEWHELPER; $this->mockViewHelper->expects($this->any())->method('isOutputEscapingEnabled')->will($this->returnValue(false)); $this->mockNode->expects($this->any())->method('getUninitializedViewHelper')->will($this->returnValue($this->mockViewHelper)); $this->escapeInterceptor->_set('childrenEscapingEnabled', false); $this->escapeInterceptor->_set('viewHelperNodesWhichDisableTheInterceptor', array($this->mockNode)); $this->escapeInterceptor->process($this->mockNode, $interceptorPosition, $this->mockParsingState); $this->assertTrue($this->escapeInterceptor->_get('childrenEscapingEnabled')); } /** * @test */ public function processWrapsCurrentViewHelperInHtmlspecialcharsViewHelperOnObjectAccessor() { $interceptorPosition = 
InterceptorInterface::INTERCEPT_OBJECTACCESSOR; $mockNode = $this->getMockBuilder(\TYPO3\Fluid\Core\Parser\SyntaxTree\ObjectAccessorNode::class)->disableOriginalConstructor()->getMock(); $mockEscapeViewHelper = $this->createMock(\TYPO3\Fluid\ViewHelpers\Format\HtmlspecialcharsViewHelper::class); $mockObjectManager = $this->createMock(\TYPO3\Flow\Object\ObjectManagerInterface::class); $mockObjectManager->expects($this->at(0))->method('get')->with(\TYPO3\Fluid\ViewHelpers\Format\HtmlspecialcharsViewHelper::class)->will($this->returnValue($mockEscapeViewHelper)); $mockObjectManager->expects($this->at(1))->method('get')->with(\TYPO3\Fluid\Core\Parser\SyntaxTree\ViewHelperNode::class, $mockEscapeViewHelper, array('value' => $mockNode))->will($this->returnValue($this->mockNode)); $this->escapeInterceptor->injectObjectManager($mockObjectManager); $actualResult = $this->escapeInterceptor->process($mockNode, $interceptorPosition, $this->mockParsingState); $this->assertSame($this->mockNode, $actualResult); } }
neos/fluid
Tests/Unit/Core/Parser/Interceptor/EscapeTest.php
PHP
mit
5,380
<a href='https://github.com/angular/angular.js/edit/v1.3.x/src/ng/directive/ngEventDirs.js?message=docs(ngKeydown)%3A%20describe%20your%20change...#L242' class='improve-docs btn btn-primary'><i class="glyphicon glyphicon-edit">&nbsp;</i>Improve this Doc</a> <a href='https://github.com/angular/angular.js/tree/v1.3.1/src/ng/directive/ngEventDirs.js#L242' class='view-source pull-right btn btn-primary'> <i class="glyphicon glyphicon-zoom-in">&nbsp;</i>View Source </a> <header class="api-profile-header"> <h1 class="api-profile-header-heading">ngKeydown</h1> <ol class="api-profile-header-structure naked-list step-list"> <li> - directive in module <a href="api/ng">ng</a> </li> </ol> </header> <div class="api-profile-description"> <p>Specify custom behavior on keydown event.</p> </div> <div> <h2>Directive Info</h2> <ul> <li>This directive executes at priority level 0.</li> </ul> <h2 id="usage">Usage</h2> <div class="usage"> <ul> <li>as attribute: <pre><code>&lt;ANY&#10; ng-keydown=&quot;&quot;&gt;&#10;...&#10;&lt;/ANY&gt;</code></pre> </li> </div> <section class="api-section"> <h3>Arguments</h3> <table class="variables-matrix input-arguments"> <thead> <tr> <th>Param</th> <th>Type</th> <th>Details</th> </tr> </thead> <tbody> <tr> <td> ngKeydown </td> <td> <a href="" class="label type-hint type-hint-expression">expression</a> </td> <td> <p><a href="guide/expression">Expression</a> to evaluate upon keydown. 
(Event object is available as <code>$event</code> and can be interrogated for keyCode, altKey, etc.)</p> </td> </tr> </tbody> </table> </section> <h2 id="example">Example</h2><p> <div> <a ng-click="openPlunkr('examples/example-example74')" class="btn pull-right"> <i class="glyphicon glyphicon-edit">&nbsp;</i> Edit in Plunker</a> <div class="runnable-example" path="examples/example-example74"> <div class="runnable-example-file" name="index.html" language="html" type="html"> <pre><code>&lt;input ng-keydown=&quot;count = count + 1&quot; ng-init=&quot;count=0&quot;&gt;&#10;key down count: {{count}}</code></pre> </div> <iframe class="runnable-example-frame" src="examples/example-example74/index.html" name="example-example74"></iframe> </div> </div> </p> </div>
jeros-mz/angular.mobile.prototype
scripts/angular/docs/partials/api/ng/directive/ngKeydown.html
HTML
mit
2,520
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import { Injector, NgModule, NgZone, OpaqueToken } from '@angular/core'; import { AsyncTestCompleter } from './async_test_completer'; import { ComponentFixture } from './component_fixture'; import { stringify } from './facade/lang'; import { TestingCompilerFactory } from './test_compiler'; var UNDEFINED = new Object(); /** * An abstract class for inserting the root test component element in a platform independent way. * * @experimental */ export var TestComponentRenderer = (function () { function TestComponentRenderer() { } TestComponentRenderer.prototype.insertRootElement = function (rootElementId) { }; return TestComponentRenderer; }()); var _nextRootElementId = 0; /** * @experimental */ export var ComponentFixtureAutoDetect = new OpaqueToken('ComponentFixtureAutoDetect'); /** * @experimental */ export var ComponentFixtureNoNgZone = new OpaqueToken('ComponentFixtureNoNgZone'); /** * @whatItDoes Configures and initializes environment for unit testing and provides methods for * creating components and services in unit tests. * @description * * TestBed is the primary api for writing unit tests for Angular applications and libraries. * * @stable */ export var TestBed = (function () { function TestBed() { this._instantiated = false; this._compiler = null; this._moduleRef = null; this._moduleWithComponentFactories = null; this._compilerOptions = []; this._moduleOverrides = []; this._componentOverrides = []; this._directiveOverrides = []; this._pipeOverrides = []; this._providers = []; this._declarations = []; this._imports = []; this._schemas = []; this._activeFixtures = []; this.platform = null; this.ngModule = null; } /** * Initialize the environment for testing with a compiler factory, a PlatformRef, and an * angular module. These are common to every test in the suite. 
* * This may only be called once, to set up the common providers for the current test * suite on the current platform. If you absolutely need to change the providers, * first use `resetTestEnvironment`. * * Test modules and platforms for individual platforms are available from * '@angular/<platform_name>/testing'. * * @experimental */ TestBed.initTestEnvironment = function (ngModule, platform) { var testBed = getTestBed(); testBed.initTestEnvironment(ngModule, platform); return testBed; }; /** * Reset the providers for the test injector. * * @experimental */ TestBed.resetTestEnvironment = function () { getTestBed().resetTestEnvironment(); }; TestBed.resetTestingModule = function () { getTestBed().resetTestingModule(); return TestBed; }; /** * Allows overriding default compiler providers and settings * which are defined in test_injector.js */ TestBed.configureCompiler = function (config) { getTestBed().configureCompiler(config); return TestBed; }; /** * Allows overriding default providers, directives, pipes, modules of the test injector, * which are defined in test_injector.js */ TestBed.configureTestingModule = function (moduleDef) { getTestBed().configureTestingModule(moduleDef); return TestBed; }; /** * Compile components with a `templateUrl` for the test's NgModule. * It is necessary to call this function * as fetching urls is asynchronous. 
*/ TestBed.compileComponents = function () { return getTestBed().compileComponents(); }; TestBed.overrideModule = function (ngModule, override) { getTestBed().overrideModule(ngModule, override); return TestBed; }; TestBed.overrideComponent = function (component, override) { getTestBed().overrideComponent(component, override); return TestBed; }; TestBed.overrideDirective = function (directive, override) { getTestBed().overrideDirective(directive, override); return TestBed; }; TestBed.overridePipe = function (pipe, override) { getTestBed().overridePipe(pipe, override); return TestBed; }; TestBed.get = function (token, notFoundValue) { if (notFoundValue === void 0) { notFoundValue = Injector.THROW_IF_NOT_FOUND; } return getTestBed().get(token, notFoundValue); }; TestBed.createComponent = function (component) { return getTestBed().createComponent(component); }; /** * Initialize the environment for testing with a compiler factory, a PlatformRef, and an * angular module. These are common to every test in the suite. * * This may only be called once, to set up the common providers for the current test * suite on the current platform. If you absolutely need to change the providers, * first use `resetTestEnvironment`. * * Test modules and platforms for individual platforms are available from * '@angular/<platform_name>/testing'. * * @experimental */ TestBed.prototype.initTestEnvironment = function (ngModule, platform) { if (this.platform || this.ngModule) { throw new Error('Cannot set base providers because it has already been called'); } this.platform = platform; this.ngModule = ngModule; }; /** * Reset the providers for the test injector. 
* * @experimental */ TestBed.prototype.resetTestEnvironment = function () { this.resetTestingModule(); this.platform = null; this.ngModule = null; }; TestBed.prototype.resetTestingModule = function () { this._compiler = null; this._moduleOverrides = []; this._componentOverrides = []; this._directiveOverrides = []; this._pipeOverrides = []; this._moduleRef = null; this._moduleWithComponentFactories = null; this._compilerOptions = []; this._providers = []; this._declarations = []; this._imports = []; this._schemas = []; this._instantiated = false; this._activeFixtures.forEach(function (fixture) { return fixture.destroy(); }); this._activeFixtures = []; }; TestBed.prototype.configureCompiler = function (config) { this._assertNotInstantiated('TestBed.configureCompiler', 'configure the compiler'); this._compilerOptions.push(config); }; TestBed.prototype.configureTestingModule = function (moduleDef) { this._assertNotInstantiated('TestBed.configureTestingModule', 'configure the test module'); if (moduleDef.providers) { (_a = this._providers).push.apply(_a, moduleDef.providers); } if (moduleDef.declarations) { (_b = this._declarations).push.apply(_b, moduleDef.declarations); } if (moduleDef.imports) { (_c = this._imports).push.apply(_c, moduleDef.imports); } if (moduleDef.schemas) { (_d = this._schemas).push.apply(_d, moduleDef.schemas); } var _a, _b, _c, _d; }; TestBed.prototype.compileComponents = function () { var _this = this; if (this._moduleWithComponentFactories || this._instantiated) { return Promise.resolve(null); } var moduleType = this._createCompilerAndModule(); return this._compiler.compileModuleAndAllComponentsAsync(moduleType) .then(function (moduleAndComponentFactories) { _this._moduleWithComponentFactories = moduleAndComponentFactories; }); }; TestBed.prototype._initIfNeeded = function () { if (this._instantiated) { return; } if (!this._moduleWithComponentFactories) { try { var moduleType = this._createCompilerAndModule(); 
this._moduleWithComponentFactories = this._compiler.compileModuleAndAllComponentsSync(moduleType); } catch (e) { if (e.compType) { throw new Error(("This test module uses the component " + stringify(e.compType) + " which is using a \"templateUrl\", but they were never compiled. ") + "Please call \"TestBed.compileComponents\" before your test."); } else { throw e; } } } this._moduleRef = this._moduleWithComponentFactories.ngModuleFactory.create(this.platform.injector); this._instantiated = true; }; TestBed.prototype._createCompilerAndModule = function () { var _this = this; var providers = this._providers.concat([{ provide: TestBed, useValue: this }]); var declarations = this._declarations; var imports = [this.ngModule, this._imports]; var schemas = this._schemas; var DynamicTestModule = (function () { function DynamicTestModule() { } DynamicTestModule.decorators = [ { type: NgModule, args: [{ providers: providers, declarations: declarations, imports: imports, schemas: schemas },] }, ]; /** @nocollapse */ DynamicTestModule.ctorParameters = function () { return []; }; return DynamicTestModule; }()); var compilerFactory = this.platform.injector.get(TestingCompilerFactory); this._compiler = compilerFactory.createTestingCompiler(this._compilerOptions.concat([{ useDebug: true }])); this._moduleOverrides.forEach(function (entry) { return _this._compiler.overrideModule(entry[0], entry[1]); }); this._componentOverrides.forEach(function (entry) { return _this._compiler.overrideComponent(entry[0], entry[1]); }); this._directiveOverrides.forEach(function (entry) { return _this._compiler.overrideDirective(entry[0], entry[1]); }); this._pipeOverrides.forEach(function (entry) { return _this._compiler.overridePipe(entry[0], entry[1]); }); return DynamicTestModule; }; TestBed.prototype._assertNotInstantiated = function (methodName, methodDescription) { if (this._instantiated) { throw new Error(("Cannot " + methodDescription + " when the test module has already been instantiated. 
") + ("Make sure you are not using `inject` before `" + methodName + "`.")); } }; TestBed.prototype.get = function (token, notFoundValue) { if (notFoundValue === void 0) { notFoundValue = Injector.THROW_IF_NOT_FOUND; } this._initIfNeeded(); if (token === TestBed) { return this; } // Tests can inject things from the ng module and from the compiler, // but the ng module can't inject things from the compiler and vice versa. var result = this._moduleRef.injector.get(token, UNDEFINED); return result === UNDEFINED ? this._compiler.injector.get(token, notFoundValue) : result; }; TestBed.prototype.execute = function (tokens, fn) { var _this = this; this._initIfNeeded(); var params = tokens.map(function (t) { return _this.get(t); }); return fn.apply(void 0, params); }; TestBed.prototype.overrideModule = function (ngModule, override) { this._assertNotInstantiated('overrideModule', 'override module metadata'); this._moduleOverrides.push([ngModule, override]); }; TestBed.prototype.overrideComponent = function (component, override) { this._assertNotInstantiated('overrideComponent', 'override component metadata'); this._componentOverrides.push([component, override]); }; TestBed.prototype.overrideDirective = function (directive, override) { this._assertNotInstantiated('overrideDirective', 'override directive metadata'); this._directiveOverrides.push([directive, override]); }; TestBed.prototype.overridePipe = function (pipe, override) { this._assertNotInstantiated('overridePipe', 'override pipe metadata'); this._pipeOverrides.push([pipe, override]); }; TestBed.prototype.createComponent = function (component) { var _this = this; this._initIfNeeded(); var componentFactory = this._moduleWithComponentFactories.componentFactories.find(function (compFactory) { return compFactory.componentType === component; }); if (!componentFactory) { throw new Error("Cannot create the component " + stringify(component) + " as it was not imported into the testing module!"); } var noNgZone = 
this.get(ComponentFixtureNoNgZone, false); var autoDetect = this.get(ComponentFixtureAutoDetect, false); var ngZone = noNgZone ? null : this.get(NgZone, null); var testComponentRenderer = this.get(TestComponentRenderer); var rootElId = "root" + _nextRootElementId++; testComponentRenderer.insertRootElement(rootElId); var initComponent = function () { var componentRef = componentFactory.create(_this, [], "#" + rootElId); return new ComponentFixture(componentRef, ngZone, autoDetect); }; var fixture = !ngZone ? initComponent() : ngZone.run(initComponent); this._activeFixtures.push(fixture); return fixture; }; return TestBed; }()); var _testBed = null; /** * @experimental */ export function getTestBed() { return _testBed = _testBed || new TestBed(); } /** * Allows injecting dependencies in `beforeEach()` and `it()`. * * Example: * * ``` * beforeEach(inject([Dependency, AClass], (dep, object) => { * // some code that uses `dep` and `object` * // ... * })); * * it('...', inject([AClass], (object) => { * object.doSomething(); * expect(...); * }) * ``` * * Notes: * - inject is currently a function because of some Traceur limitation the syntax should * eventually * becomes `it('...', @Inject (object: AClass, async: AsyncTestCompleter) => { ... });` * * @stable */ export function inject(tokens, fn) { var testBed = getTestBed(); if (tokens.indexOf(AsyncTestCompleter) >= 0) { return function () { // Return an async test method that returns a Promise if AsyncTestCompleter is one of // the // injected tokens. 
return testBed.compileComponents().then(function () { var completer = testBed.get(AsyncTestCompleter); testBed.execute(tokens, fn); return completer.promise; }); }; } else { return function () { return testBed.execute(tokens, fn); }; } } /** * @experimental */ export var InjectSetupWrapper = (function () { function InjectSetupWrapper(_moduleDef) { this._moduleDef = _moduleDef; } InjectSetupWrapper.prototype._addModule = function () { var moduleDef = this._moduleDef(); if (moduleDef) { getTestBed().configureTestingModule(moduleDef); } }; InjectSetupWrapper.prototype.inject = function (tokens, fn) { var _this = this; return function () { _this._addModule(); return inject(tokens, fn)(); }; }; return InjectSetupWrapper; }()); export function withModule(moduleDef, fn) { if (fn === void 0) { fn = null; } if (fn) { return function () { var testBed = getTestBed(); if (moduleDef) { testBed.configureTestingModule(moduleDef); } return fn(); }; } return new InjectSetupWrapper(function () { return moduleDef; }); } //# sourceMappingURL=test_bed.js.map
ujjwalsharma045/Angular2-Creative-Tim-Admin-Theme-With-Code-
node_modules/ng2-opd-popup/node_modules/@angular/core/testing/test_bed.js
JavaScript
mit
16,066
<?php namespace AppBundle\Controller; use Symfony\Component\HttpFoundation\Request; use Symfony\Bundle\FrameworkBundle\Controller\Controller; use Sensio\Bundle\FrameworkExtraBundle\Configuration\Method; use Sensio\Bundle\FrameworkExtraBundle\Configuration\Route; use Sensio\Bundle\FrameworkExtraBundle\Configuration\Template; use AppBundle\Entity\Pokoj; use AppBundle\Form\PokojType; /** * Pokoj controller. * * @Route("/admin/pokoj") */ class PokojController extends Controller { /** * Lists all Pokoj entities. * * @Route("/", name="admin_pokoj") * @Method("GET") * @Template() */ public function indexAction() { $em = $this->getDoctrine()->getManager(); $entities = $em->getRepository('AppBundle:Pokoj')->findAll(); return array( 'entities' => $entities, ); } /** * Creates a new Pokoj entity. * * @Route("/", name="admin_pokoj_create") * @Method("POST") * @Template("AppBundle:Pokoj:new.html.twig") */ public function createAction(Request $request) { $entity = new Pokoj(); $form = $this->createCreateForm($entity); $form->handleRequest($request); if ($form->isValid()) { $em = $this->getDoctrine()->getManager(); $em->persist($entity); $em->flush(); return $this->redirect($this->generateUrl('admin_pokoj_show', array('id' => $entity->getId()))); } return array( 'entity' => $entity, 'form' => $form->createView(), ); } /** * Creates a form to create a Pokoj entity. * * @param Pokoj $entity The entity * * @return \Symfony\Component\Form\Form The form */ private function createCreateForm(Pokoj $entity) { $form = $this->createForm(new PokojType(), $entity, array( 'action' => $this->generateUrl('admin_pokoj_create'), 'method' => 'POST', )); $form->add('submit', 'submit', array('label' => 'Create')); return $form; } /** * Displays a form to create a new Pokoj entity. 
* * @Route("/new", name="admin_pokoj_new") * @Method("GET") * @Template() */ public function newAction() { $entity = new Pokoj(); $form = $this->createCreateForm($entity); return array( 'entity' => $entity, 'form' => $form->createView(), ); } /** * Finds and displays a Pokoj entity. * * @Route("/{id}", name="admin_pokoj_show") * @Method("GET") * @Template() */ public function showAction($id) { $em = $this->getDoctrine()->getManager(); $entity = $em->getRepository('AppBundle:Pokoj')->find($id); if (!$entity) { throw $this->createNotFoundException('Unable to find Pokoj entity.'); } $deleteForm = $this->createDeleteForm($id); return array( 'entity' => $entity, 'delete_form' => $deleteForm->createView(), ); } /** * Displays a form to edit an existing Pokoj entity. * * @Route("/{id}/edit", name="admin_pokoj_edit") * @Method("GET") * @Template() */ public function editAction($id) { $em = $this->getDoctrine()->getManager(); $entity = $em->getRepository('AppBundle:Pokoj')->find($id); if (!$entity) { throw $this->createNotFoundException('Unable to find Pokoj entity.'); } $editForm = $this->createEditForm($entity); $deleteForm = $this->createDeleteForm($id); return array( 'entity' => $entity, 'edit_form' => $editForm->createView(), 'delete_form' => $deleteForm->createView(), ); } /** * Creates a form to edit a Pokoj entity. * * @param Pokoj $entity The entity * * @return \Symfony\Component\Form\Form The form */ private function createEditForm(Pokoj $entity) { $form = $this->createForm(new PokojType(), $entity, array( 'action' => $this->generateUrl('admin_pokoj_update', array('id' => $entity->getId())), 'method' => 'PUT', )); $form->add('submit', 'submit', array('label' => 'Update')); return $form; } /** * Edits an existing Pokoj entity. 
* * @Route("/{id}", name="admin_pokoj_update") * @Method("PUT") * @Template("AppBundle:Pokoj:edit.html.twig") */ public function updateAction(Request $request, $id) { $em = $this->getDoctrine()->getManager(); $entity = $em->getRepository('AppBundle:Pokoj')->find($id); if (!$entity) { throw $this->createNotFoundException('Unable to find Pokoj entity.'); } $deleteForm = $this->createDeleteForm($id); $editForm = $this->createEditForm($entity); $editForm->handleRequest($request); if ($editForm->isValid()) { $em->flush(); return $this->redirect($this->generateUrl('admin_pokoj_edit', array('id' => $id))); } return array( 'entity' => $entity, 'edit_form' => $editForm->createView(), 'delete_form' => $deleteForm->createView(), ); } /** * Deletes a Pokoj entity. * * @Route("/{id}", name="admin_pokoj_delete") * @Method("DELETE") */ public function deleteAction(Request $request, $id) { $form = $this->createDeleteForm($id); $form->handleRequest($request); if ($form->isValid()) { $em = $this->getDoctrine()->getManager(); $entity = $em->getRepository('AppBundle:Pokoj')->find($id); if (!$entity) { throw $this->createNotFoundException('Unable to find Pokoj entity.'); } $em->remove($entity); $em->flush(); } return $this->redirect($this->generateUrl('admin_pokoj')); } /** * Creates a form to delete a Pokoj entity by id. * * @param mixed $id The entity id * * @return \Symfony\Component\Form\Form The form */ private function createDeleteForm($id) { return $this->createFormBuilder() ->setAction($this->generateUrl('admin_pokoj_delete', array('id' => $id))) ->setMethod('DELETE') ->add('submit', 'submit', array('label' => 'Delete')) ->getForm() ; } }
Dragonis/exam-git
src/AppBundle/Controller/PokojController.php
PHP
mit
6,600
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>windows::basic_stream_handle::implementation_type</title> <link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../../boost_asio.html" title="Boost.Asio"> <link rel="up" href="../windows__basic_stream_handle.html" title="windows::basic_stream_handle"> <link rel="prev" href="implementation.html" title="windows::basic_stream_handle::implementation"> <link rel="next" href="is_open.html" title="windows::basic_stream_handle::is_open"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td> <td align="center"><a href="../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="implementation.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../windows__basic_stream_handle.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="is_open.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h4 class="title"> <a name="boost_asio.reference.windows__basic_stream_handle.implementation_type"></a><a class="link" href="implementation_type.html" 
title="windows::basic_stream_handle::implementation_type">windows::basic_stream_handle::implementation_type</a> </h4></div></div></div> <p> <span class="emphasis"><em>Inherited from basic_io_object.</em></span> </p> <p> <a class="indexterm" name="idp186064560"></a> The underlying implementation type of I/O object. </p> <pre class="programlisting"><span class="keyword">typedef</span> <span class="identifier">service_type</span><span class="special">::</span><span class="identifier">implementation_type</span> <span class="identifier">implementation_type</span><span class="special">;</span> </pre> <h6> <a name="boost_asio.reference.windows__basic_stream_handle.implementation_type.h0"></a> <span class="phrase"><a name="boost_asio.reference.windows__basic_stream_handle.implementation_type.requirements"></a></span><a class="link" href="implementation_type.html#boost_asio.reference.windows__basic_stream_handle.implementation_type.requirements">Requirements</a> </h6> <p> <span class="emphasis"><em>Header: </em></span><code class="literal">boost/asio/windows/basic_stream_handle.hpp</code> </p> <p> <span class="emphasis"><em>Convenience header: </em></span><code class="literal">boost/asio.hpp</code> </p> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2003-2015 Christopher M. Kohlhoff<p> Distributed under the Boost Software License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="implementation.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../windows__basic_stream_handle.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="is_open.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
TyRoXx/cdm
original_sources/boost_1_59_0/doc/html/boost_asio/reference/windows__basic_stream_handle/implementation_type.html
HTML
mit
4,291
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ export interface Position { readonly offset: number; readonly line: number; readonly character: number; } export type JsonAstNode = JsonAstNumber | JsonAstString | JsonAstIdentifier | JsonAstArray | JsonAstObject | JsonAstConstantFalse | JsonAstConstantNull | JsonAstConstantTrue; export interface JsonAstNodeBase { readonly start: Position; readonly end: Position; readonly text: string; readonly comments?: (JsonAstComment | JsonAstMultilineComment)[]; } export interface JsonAstNumber extends JsonAstNodeBase { readonly kind: 'number'; readonly value: number; } export interface JsonAstString extends JsonAstNodeBase { readonly kind: 'string'; readonly value: string; } export interface JsonAstIdentifier extends JsonAstNodeBase { readonly kind: 'identifier'; readonly value: string; } export interface JsonArray extends Array<JsonValue> {} export interface JsonAstArray extends JsonAstNodeBase { readonly kind: 'array'; readonly elements: JsonAstNode[]; readonly value: JsonArray; } export interface JsonObject { [prop: string]: JsonValue; } export interface JsonAstKeyValue extends JsonAstNodeBase { readonly kind: 'keyvalue'; readonly key: JsonAstString | JsonAstIdentifier; readonly value: JsonAstNode; } export interface JsonAstObject extends JsonAstNodeBase { readonly kind: 'object'; readonly properties: JsonAstKeyValue[]; readonly value: JsonObject; } export interface JsonAstConstantFalse extends JsonAstNodeBase { readonly kind: 'false'; readonly value: false; } export interface JsonAstConstantNull extends JsonAstNodeBase { readonly kind: 'null'; readonly value: null; } export interface JsonAstConstantTrue extends JsonAstNodeBase { readonly kind: 'true'; readonly value: true; } // Loose mode AST. 
export interface JsonAstMultilineComment extends JsonAstNodeBase { readonly kind: 'multicomment'; readonly content: string; } export interface JsonAstComment extends JsonAstNodeBase { readonly kind: 'comment'; readonly content: string; } export type JsonValue = JsonAstNode['value']; export function isJsonObject(value: JsonValue): value is JsonObject { return value != null && typeof value === 'object' && !Array.isArray(value); } export function isJsonArray(value: JsonValue): value is JsonArray { return Array.isArray(value); }
DevIntent/angular-cli
packages/angular_devkit/core/src/json/interface.ts
TypeScript
mit
2,582
'use strict'; (function(window, document, $, undefined) { window.kunstmaan = window.kunstmaan || {}; window.kunstmaan.leadGeneration = window.kunstmaan.leadGeneration || {}; window.kunstmaan.leadGeneration.Popup = function(name, htmlId) { var instance = { 'name': name }; var _rules = [], _$popup = $('#' + htmlId), _$close = $('.' + htmlId + '--close'), _$noThanks = $('.' + htmlId + '--no-thanks'), _$submit = $('.' + htmlId + '--submit'); var _listenToHtmlClicks, _listenToEvents, _conditionsMet, _forEachRule, _doConditionsMetLogic, _htmlNoThanks, _noThanks, _htmlClose, _close, _conversion, _htmlSubmit, _submit, _onSubmitSuccess, _getData, _setData; instance.addRule = function(rule) { rule.setPopup(instance); _rules.push(rule); }; instance.activate = function() { _listenToHtmlClicks(); _listenToEvents(); window.kunstmaan.leadGeneration.log(instance.name + ": activating all rules"); // listening to all rules document.addEventListener(window.kunstmaan.leadGeneration.events.CONDITIONS_MET, _conditionsMet, true); var data = _getData(); // if not converted && not clicked "no thanks", activate & listen to rules if (data === null || (!data.already_converted && !data.no_thanks)) { if (_rules.length === 0) { // when there are nu rules, directly show the popup _doConditionsMetLogic(); } else { _forEachRule(function(rule) { rule.activate(); }); } } }; instance.show = function() { window.kunstmaan.leadGeneration.log(instance.name + ": show popup"); document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.BEFORE_SHOWING, { detail: {popup: instance.name} })); $('#' + htmlId).removeClass('popup--hide').addClass('popup--show'); var data = _getData(); data.last_shown = new window.Date().getTime(); _setData(data); document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.IS_SHOWING, { detail: {popup: instance.name} })); }; instance.setRuleProperty = function(ruleId, id, value) { var data = _getData(); if (!data.rule) { data.rule = 
{}; } if (!data.rule[ruleId]) { data.rule[ruleId] = {}; } // adjust timestamp data.rule[ruleId][id] = value; // store in browser storage _setData(data); }; instance.getRuleProperty = function(ruleId, id) { var data = _getData(); if (!data.rule || !data.rule[ruleId] || !data.rule[ruleId][id]) { return false; } return data.rule[ruleId][id]; }; _listenToHtmlClicks = function() { _$close.click(_htmlClose); _$noThanks.click(_htmlNoThanks); _$submit.click(_htmlSubmit); }; _listenToEvents = function() { document.addEventListener(window.kunstmaan.leadGeneration.events.DO_CLOSE, _close, true); document.addEventListener(window.kunstmaan.leadGeneration.events.DO_NO_THANKS, _noThanks, true); document.addEventListener(window.kunstmaan.leadGeneration.events.DO_SUBMIT_FORM, _submit, true); document.addEventListener(window.kunstmaan.leadGeneration.events.DO_CONVERSION, _conversion, true); }; _conditionsMet = function(event) { if (event.detail.popup === instance.name) { window.kunstmaan.leadGeneration.log(instance.name + ": checking all conditions"); _doConditionsMetLogic(); } }; _forEachRule = function(cb) { var i = 0; for (; i < _rules.length; i++) { cb(_rules[i]); } }; _doConditionsMetLogic = function() { var areMet = true; _forEachRule(function(rule) { if (!rule.isMet) { areMet = false; } }); var data = _getData(); // if all conditions are met notify that the popup is ready to be shown if (areMet && (data === null || (!data.already_converted && !data.no_thanks))) { window.kunstmaan.leadGeneration.log(instance.name + ": firing ready event"); document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.READY_TO_SHOW, { detail: {popup: instance.name} })); } }; _htmlNoThanks = function(event) { window.kunstmaan.leadGeneration.log(instance.name + ": no thanks"); event.preventDefault(); document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.DO_NO_THANKS, { detail: {popup: instance.name} })); }; _noThanks = function(event) { if 
(event.detail.popup === instance.name) { window.kunstmaan.leadGeneration.log(instance.name + ": no thanks event catched"); var data = _getData(); data.no_thanks = true; _setData(data); document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.NO_THANKS, { detail: {popup: instance.name} })); _close(event); } }; _htmlClose = function(event) { event.preventDefault(); window.kunstmaan.leadGeneration.log(instance.name + ": html close click"); document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.DO_CLOSE, { detail: {popup: instance.name} })); }; _close = function(event) { if (event.detail.popup === instance.name) { document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.BEFORE_CLOSING, {detail: {popup: instance.name}})); window.kunstmaan.leadGeneration.log(instance.name + ": close event catched"); _$popup.removeClass('popup--show').addClass('popup--hide'); document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.IS_CLOSING, {detail: {popup: instance.name}})); } }; _conversion = function(event) { if (event.detail.popup === instance.name) { window.kunstmaan.leadGeneration.log(instance.name + ': mark as converted'); var data = _getData(); data.already_converted = true; _setData(data); } }; _htmlSubmit = function(event) { event.preventDefault(); window.kunstmaan.leadGeneration.log(instance.name + ': html submit form'); var $form = _$submit.parents('form'); document.dispatchEvent(new window.CustomEvent(window.kunstmaan.leadGeneration.events.DO_SUBMIT_FORM, {detail: {popup: instance.name, form: $form}})); }; _submit = function(event) { if (event.detail.popup === instance.name) { window.kunstmaan.leadGeneration.log(instance.name + ': submit form'); var url = $(event.detail.form).attr('action'); var data = $(event.detail.form).serialize(); $.post(url, data, _onSubmitSuccess); } }; _onSubmitSuccess = function(data) { window.kunstmaan.leadGeneration.log(instance.name 
+ ': onSubmitSuccess'); $('#' + htmlId + '--content').html(data); _listenToHtmlClicks(); }; _getData = function() { if (window.localStorage) { var data = window.localStorage.getItem('popup_' + instance.name); if (data != null) { return window.JSON.parse(data); } else { data = {'last_shown': null, 'already_converted': false, 'no_thanks': false}; _setData(data); } return data; } }; _setData = function(data) { if (window.localStorage) { window.localStorage.setItem('popup_' + instance.name, window.JSON.stringify(data)); } }; return instance; }; })(window, document, $);
webtown-php/KunstmaanBundlesCMS
src/Kunstmaan/LeadGenerationBundle/Resources/public/js/popup/Popup.js
JavaScript
mit
8,758
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/zf2 for the canonical source repository * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ namespace Zend\Stdlib\Guard; use Traversable; /** * Provide a guard method for array or Traversable data */ trait ArrayOrTraversableGuardTrait { /** * Verifies that the data is an array or Traversable * * @param mixed $data the data to verify * @param string $dataName the data name * @param string $exceptionClass FQCN for the exception * @throws \Exception */ protected function guardForArrayOrTraversable( $data, $dataName = 'Argument', $exceptionClass = 'Zend\Stdlib\Exception\InvalidArgumentException' ) { if (!is_array($data) && !($data instanceof Traversable)) { $message = sprintf( "%s must be an array or Traversable, [%s] given", $dataName, is_object($data) ? get_class($data) : gettype($data) ); throw new $exceptionClass($message); } } }
wp-plugins/double-click
vendor/zendframework/zendframework/library/Zend/Stdlib/Guard/ArrayOrTraversableGuardTrait.php
PHP
mit
1,300
<?php /* V4.94 23 Jan 2007 (c) 2000-2007 John Lim (jlim#natsoft.com.my). All rights reserved. Released under both BSD license and Lesser GPL library license. Whenever there is any discrepancy between the two licenses, the BSD license will take precedence. Set tabs to 4 for best viewing. This class provides recordset pagination with First/Prev/Next/Last links. Feel free to modify this class for your own use as it is very basic. To learn how to use it, see the example in adodb/tests/testpaging.php. "Pablo Costa" <pablo@cbsp.com.br> implemented Render_PageLinks(). Please note, this class is entirely unsupported, and no free support requests except for bug reports will be entertained by the author. */ class ADODB_Pager { var $id; // unique id for pager (defaults to 'adodb') var $db; // ADODB connection object var $sql; // sql used var $rs; // recordset generated var $curr_page; // current page number before Render() called, calculated in constructor var $rows; // number of rows per page var $linksPerPage=10; // number of links per page in navigation bar var $showPageLinks; var $gridAttributes = 'width=100% border=1 bgcolor=white'; // Localize text strings here var $first = '<code>|&lt;</code>'; var $prev = '<code>&lt;&lt;</code>'; var $next = '<code>>></code>'; var $last = '<code>>|</code>'; var $moreLinks = '...'; var $startLinks = '...'; var $gridHeader = false; var $htmlSpecialChars = true; var $page = 'Page'; var $linkSelectedColor = 'red'; var $cache = 0; #secs to cache with CachePageExecute() //---------------------------------------------- // constructor // // $db adodb connection object // $sql sql statement // $id optional id to identify which pager, // if you have multiple on 1 page. 
// $id should be only be [a-z0-9]* // function ADODB_Pager(&$db,$sql,$id = 'adodb', $showPageLinks = false) { global $PHP_SELF; $curr_page = $id.'_curr_page'; if (empty($PHP_SELF)) $PHP_SELF = htmlspecialchars($_SERVER['PHP_SELF']); // htmlspecialchars() to prevent XSS attacks $this->sql = $sql; $this->id = $id; $this->db = $db; $this->showPageLinks = $showPageLinks; $next_page = $id.'_next_page'; if (isset($_GET[$next_page])) { $_SESSION[$curr_page] = (integer) $_GET[$next_page]; } if (empty($_SESSION[$curr_page])) $_SESSION[$curr_page] = 1; ## at first page $this->curr_page = $_SESSION[$curr_page]; } //--------------------------- // Display link to first page function Render_First($anchor=true) { global $PHP_SELF; if ($anchor) { ?> <a href="<?php echo $PHP_SELF,'?',$this->id;?>_next_page=1"><?php echo $this->first;?></a> &nbsp; <?php } else { print "$this->first &nbsp; "; } } //-------------------------- // Display link to next page function render_next($anchor=true) { global $PHP_SELF; if ($anchor) { ?> <a href="<?php echo $PHP_SELF,'?',$this->id,'_next_page=',$this->rs->AbsolutePage() + 1 ?>"><?php echo $this->next;?></a> &nbsp; <?php } else { print "$this->next &nbsp; "; } } //------------------ // Link to last page // // for better performance with large recordsets, you can set // $this->db->pageExecuteCountRows = false, which disables // last page counting. function render_last($anchor=true) { global $PHP_SELF; if (!$this->db->pageExecuteCountRows) return; if ($anchor) { ?> <a href="<?php echo $PHP_SELF,'?',$this->id,'_next_page=',$this->rs->LastPageNo() ?>"><?php echo $this->last;?></a> &nbsp; <?php } else { print "$this->last &nbsp; "; } } //--------------------------------------------------- // original code by "Pablo Costa" <pablo@cbsp.com.br> function render_pagelinks() { global $PHP_SELF; $pages = $this->rs->LastPageNo(); $linksperpage = $this->linksPerPage ? 
$this->linksPerPage : $pages; for($i=1; $i <= $pages; $i+=$linksperpage) { if($this->rs->AbsolutePage() >= $i) { $start = $i; } } $numbers = ''; $end = $start+$linksperpage-1; $link = $this->id . "_next_page"; if($end > $pages) $end = $pages; if ($this->startLinks && $start > 1) { $pos = $start - 1; $numbers .= "<a href=$PHP_SELF?$link=$pos>$this->startLinks</a> "; } for($i=$start; $i <= $end; $i++) { if ($this->rs->AbsolutePage() == $i) $numbers .= "<font color=$this->linkSelectedColor><b>$i</b></font> "; else $numbers .= "<a href=$PHP_SELF?$link=$i>$i</a> "; } if ($this->moreLinks && $end < $pages) $numbers .= "<a href=$PHP_SELF?$link=$i>$this->moreLinks</a> "; print $numbers . ' &nbsp; '; } // Link to previous page function render_prev($anchor=true) { global $PHP_SELF; if ($anchor) { ?> <a href="<?php echo $PHP_SELF,'?',$this->id,'_next_page=',$this->rs->AbsolutePage() - 1 ?>"><?php echo $this->prev;?></a> &nbsp; <?php } else { print "$this->prev &nbsp; "; } } //-------------------------------------------------------- // Simply rendering of grid. You should override this for // better control over the format of the grid // // We use output buffering to keep code clean and readable. function RenderGrid() { global $gSQLBlockRows; // used by rs2html to indicate how many rows to display include_once(ADODB_DIR.'/tohtml.inc.php'); ob_start(); $gSQLBlockRows = $this->rows; rs2html($this->rs,$this->gridAttributes,$this->gridHeader,$this->htmlSpecialChars); $s = ob_get_contents(); ob_end_clean(); return $s; } //------------------------------------------------------- // Navigation bar // // we use output buffering to keep the code easy to read. 
function RenderNav() { ob_start(); if (!$this->rs->AtFirstPage()) { $this->Render_First(); $this->Render_Prev(); } else { $this->Render_First(false); $this->Render_Prev(false); } if ($this->showPageLinks){ $this->Render_PageLinks(); } if (!$this->rs->AtLastPage()) { $this->Render_Next(); $this->Render_Last(); } else { $this->Render_Next(false); $this->Render_Last(false); } $s = ob_get_contents(); ob_end_clean(); return $s; } //------------------- // This is the footer function RenderPageCount() { if (!$this->db->pageExecuteCountRows) return ''; $lastPage = $this->rs->LastPageNo(); if ($lastPage == -1) $lastPage = 1; // check for empty rs. if ($this->curr_page > $lastPage) $this->curr_page = 1; return "<font size=-1>$this->page ".$this->curr_page."/".$lastPage."</font>"; } //----------------------------------- // Call this class to draw everything. function Render($rows=10) { global $ADODB_COUNTRECS; $this->rows = $rows; if ($this->db->dataProvider == 'informix') $this->db->cursorType = IFX_SCROLL; $savec = $ADODB_COUNTRECS; if ($this->db->pageExecuteCountRows) $ADODB_COUNTRECS = true; if ($this->cache) $rs = &$this->db->CachePageExecute($this->cache,$this->sql,$rows,$this->curr_page); else $rs = &$this->db->PageExecute($this->sql,$rows,$this->curr_page); $ADODB_COUNTRECS = $savec; $this->rs = &$rs; if (!$rs) { print "<h3>Query failed: $this->sql</h3>"; return; } if (!$rs->EOF && (!$rs->AtFirstPage() || !$rs->AtLastPage())) $header = $this->RenderNav(); else $header = "&nbsp;"; $grid = $this->RenderGrid(); $footer = $this->RenderPageCount(); $this->RenderLayout($header,$grid,$footer); $rs->Close(); $this->rs = false; } //------------------------------------------------------ // override this to control overall layout and formating function RenderLayout($header,$grid,$footer,$attributes='border=1 bgcolor=beige') { echo "<table ".$attributes."><tr><td>", $header, "</td></tr><tr><td>", $grid, "</td></tr><tr><td>", $footer, "</td></tr></table>"; } } ?>
EOL/eol_php_code
vendor/rdfapi-php/api/util/adodb/adodb-pager.inc.php
PHP
mit
8,407
/// <reference path="../../Phaser/Game.ts" /> (function () { var game = new Phaser.Game(this, 'game', 800, 600, preload, create); function preload() { // Using Phasers asset loader we load up a PNG from the assets folder game.load.image('bunny', 'assets/sprites/bunny.png'); } var bunny: Phaser.Sprite; function create() { // Here we'll assign the new sprite to the local bunny variable bunny = game.add.sprite(0, 0, 'bunny'); // You don't have to use the same values when scaling a sprite, // here we'll create a short and wide bunny bunny.transform.scale.setTo(3, 0.7); } })();
2016rshah/phaser
wip/TS Tests/sprites/scale sprite 3.ts
TypeScript
mit
685
package org.knowm.xchange.bleutrade.dto.marketdata; import java.util.HashMap; import java.util.Map; import javax.annotation.Generated; import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; @JsonInclude(JsonInclude.Include.NON_NULL) @Generated("org.jsonschema2pojo") @JsonPropertyOrder({ "success", "message", "result" }) public class BleutradeOrderBookReturn { @JsonProperty("success") private Boolean success; @JsonProperty("message") private String message; @JsonProperty("result") private BleutradeOrderBook result; @JsonIgnore private Map<String, Object> additionalProperties = new HashMap<String, Object>(); /** * @return The success */ @JsonProperty("success") public Boolean getSuccess() { return success; } /** * @param success The success */ @JsonProperty("success") public void setSuccess(Boolean success) { this.success = success; } /** * @return The message */ @JsonProperty("message") public String getMessage() { return message; } /** * @param message The message */ @JsonProperty("message") public void setMessage(String message) { this.message = message; } /** * @return The result */ @JsonProperty("result") public BleutradeOrderBook getResult() { return result; } /** * @param result The result */ @JsonProperty("result") public void setResult(BleutradeOrderBook result) { this.result = result; } @JsonAnyGetter public Map<String, Object> getAdditionalProperties() { return this.additionalProperties; } @JsonAnySetter public void setAdditionalProperty(String name, Object value) { this.additionalProperties.put(name, value); } @Override public String toString() { return "BleutradeOrderBookReturn [success=" + success + ", message=" + message + ", result=" + result + ", additionalProperties=" + 
additionalProperties + "]"; } }
dozd/XChange
xchange-bleutrade/src/main/java/org/knowm/xchange/bleutrade/dto/marketdata/BleutradeOrderBookReturn.java
Java
mit
2,206
// Copyright (c) 2015-2018 The Bitcoin Core developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include <qt/platformstyle.h> #include <QApplication> #include <QColor> #include <QImage> #include <QPalette> static const struct { const char *platformId; /** Show images on push buttons */ const bool imagesOnButtons; /** Colorize single-color icons */ const bool colorizeIcons; /** Extra padding/spacing in transactionview */ const bool useExtraSpacing; } platform_styles[] = { {"macosx", false, false, true}, {"windows", true, false, false}, /* Other: linux, unix, ... */ {"other", true, true, false} }; static const unsigned platform_styles_count = sizeof(platform_styles)/sizeof(*platform_styles); namespace { /* Local functions for colorizing single-color images */ void MakeSingleColorImage(QImage& img, const QColor& colorbase) { img = img.convertToFormat(QImage::Format_ARGB32); for (int x = img.width(); x--; ) { for (int y = img.height(); y--; ) { const QRgb rgb = img.pixel(x, y); img.setPixel(x, y, qRgba(colorbase.red(), colorbase.green(), colorbase.blue(), qAlpha(rgb))); } } } QIcon ColorizeIcon(const QIcon& ico, const QColor& colorbase) { QIcon new_ico; for (const QSize& sz : ico.availableSizes()) { QImage img(ico.pixmap(sz).toImage()); MakeSingleColorImage(img, colorbase); new_ico.addPixmap(QPixmap::fromImage(img)); } return new_ico; } QImage ColorizeImage(const QString& filename, const QColor& colorbase) { QImage img(filename); MakeSingleColorImage(img, colorbase); return img; } QIcon ColorizeIcon(const QString& filename, const QColor& colorbase) { return QIcon(QPixmap::fromImage(ColorizeImage(filename, colorbase))); } } PlatformStyle::PlatformStyle(const QString &_name, bool _imagesOnButtons, bool _colorizeIcons, bool _useExtraSpacing): name(_name), imagesOnButtons(_imagesOnButtons), colorizeIcons(_colorizeIcons), useExtraSpacing(_useExtraSpacing), singleColor(0,0,0), 
textColor(0,0,0) { // Determine icon highlighting color if (colorizeIcons) { const QColor colorHighlightBg(QApplication::palette().color(QPalette::Highlight)); const QColor colorHighlightFg(QApplication::palette().color(QPalette::HighlightedText)); const QColor colorText(QApplication::palette().color(QPalette::WindowText)); const int colorTextLightness = colorText.lightness(); QColor colorbase; if (abs(colorHighlightBg.lightness() - colorTextLightness) < abs(colorHighlightFg.lightness() - colorTextLightness)) colorbase = colorHighlightBg; else colorbase = colorHighlightFg; singleColor = colorbase; } // Determine text color textColor = QColor(QApplication::palette().color(QPalette::WindowText)); } QImage PlatformStyle::SingleColorImage(const QString& filename) const { if (!colorizeIcons) return QImage(filename); return ColorizeImage(filename, SingleColor()); } QIcon PlatformStyle::SingleColorIcon(const QString& filename) const { if (!colorizeIcons) return QIcon(filename); return ColorizeIcon(filename, SingleColor()); } QIcon PlatformStyle::SingleColorIcon(const QIcon& icon) const { if (!colorizeIcons) return icon; return ColorizeIcon(icon, SingleColor()); } QIcon PlatformStyle::TextColorIcon(const QIcon& icon) const { return ColorizeIcon(icon, TextColor()); } const PlatformStyle *PlatformStyle::instantiate(const QString &platformId) { for (unsigned x=0; x<platform_styles_count; ++x) { if (platformId == platform_styles[x].platformId) { return new PlatformStyle( platform_styles[x].platformId, platform_styles[x].imagesOnButtons, platform_styles[x].colorizeIcons, platform_styles[x].useExtraSpacing); } } return nullptr; }
tjps/bitcoin
src/qt/platformstyle.cpp
C++
mit
4,143
//---------------------------------------------------------------------
// <copyright file="SelectExpandBuilderTests.cs" company="Microsoft">
//      Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------

using System;
using Microsoft.OData.Core.UriParser.Semantic;
using Microsoft.OData.Core.UriParser.TreeNodeKinds;
using Microsoft.VisualStudio.TestTools.UnitTesting;

namespace Microsoft.Test.OData.Query.TDD.Tests.UriBuilder
{
    using Microsoft.OData.Core;
    using Microsoft.OData.Core.UriBuilder;
    using Microsoft.OData.Core.UriParser;
    using System.Collections.Generic;

    /// <summary>
    /// Round-trip tests for $select/$expand handling in the OData URI builder:
    /// each test parses a relative query URI against the hard-coded test model
    /// and asserts the exact URI the builder reconstructs (normalization,
    /// merging of duplicate expands, wildcard collapsing, etc.).
    /// </summary>
    [TestClass]
    public class SelectExpandBuilderTests
    {
        // Fake service root shared by every test; expected URIs are absolute
        // against this base.
        protected static Uri ServiceRoot = new Uri("http://gobbledygook/");
        protected readonly ODataUriParserSettings settings = new ODataUriParserSettings();

        #region $select with no $expand
        [TestMethod]
        public void SelectSingleDeclaredPropertySucceeds()
        {
            Uri queryUri = new Uri("People?$select=Name", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=Name"), actualUri);
        }

        [TestMethod]
        public void SelectWithEmptyStringMeansEverything()
        {
            // An empty $select is dropped entirely from the rebuilt URI.
            Uri queryUri = new Uri("People?$select=", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People"), actualUri);
        }

        [TestMethod]
        public void SelectWithNoStringMeansNothing()
        {
            Uri queryUri = new Uri("People?", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People"), actualUri);
        }

        [TestMethod]
        public void WildcardPreemptsAllStructuralProperties()
        {
            // '*' subsumes the explicitly listed structural properties.
            Uri queryUri = new Uri("People?$select=Name, *, MyAddress", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=*"), actualUri);
        }

        [TestMethod]
        public void SelectEnumStructuralProperty()
        {
            Uri queryUri = new Uri("Pet2Set?$select=PetColorPattern", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/Pet2Set?$select=PetColorPattern"), actualUri);
        }

        [TestMethod]
        public void SelectEnumStructuralPropertyWildcard()
        {
            Uri queryUri = new Uri("Pet2Set?$select=PetColorPattern,*", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/Pet2Set?$select=*"), actualUri);
        }

        [TestMethod]
        public void SelectNavigationPropertyWithoutExpandMeansSelectLink()
        {
            Uri queryUri = new Uri("People?$select=MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=MyDog"), actualUri);
        }

        [TestMethod]
        public void SelectActionMeansOperation()
        {
            Uri queryUri = new Uri("Dogs?$select=Fully.Qualified.Namespace.Walk", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/Dogs?$select=Fully.Qualified.Namespace.Walk"), actualUri);
        }

        [TestMethod]
        public void SelectWorksWithEntitySet()
        {
            Uri queryUri = new Uri("People?$select=Name", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=Name"), actualUri);
        }

        [TestMethod]
        public void MultipleSelectionsWorkWithEntitySet()
        {
            // Whitespace around commas is stripped; items are re-joined with ','.
            Uri queryUri = new Uri("People?$select=Name, MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Name,MyDog"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectSupportsTypeSegments()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Employee/PaintingsInOffice", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/PaintingsInOffice"), actualUri.OriginalString);
        }

        [TestMethod]
        public void UnneededTypeSegmentInSelectIsOk()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Employee/Name", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/Name"), actualUri.OriginalString);
        }

        [TestMethod]
        public void TypeSegmentForVeryDerivedTypeAndSelectPropertyOfMiddleDerivedType()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Manager/WorkEmail", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/WorkEmail"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectNavigationPropertyOnDerivedType()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Manager/PaintingsInOffice", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/PaintingsInOffice"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectOpenPropertyOnDerivedType()
        {
            Uri queryUri = new Uri("Paintings?$select=Fully.Qualified.Namespace.FramedPainting/OpenProp", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/Paintings?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.FramedPainting/OpenProp"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectOpenPropertyOnDerivedTypeWhereBaseTypeIsNotOpen()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.OpenEmployee/OpenProp", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.OpenEmployee/OpenProp"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectFunctionWithOverloadsScopedByTypeSegment()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Employee/Fully.Qualified.Namespace.HasDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/Fully.Qualified.Namespace.HasDog"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectActionWithOverloads()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Move", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=Fully.Qualified.Namespace.Move"), actualUri);
        }

        [TestMethod]
        public void SelectActionWithOverloadsScopedByTypeSegment()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Employee/Fully.Qualified.Namespace.Move", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/Fully.Qualified.Namespace.Move"), actualUri.OriginalString);
        }

        [TestMethod]
        public void NamespaceQualifiedActionNameOnOpenTypeShouldBeInterpretedAsAnOperation()
        {
            Uri queryUri = new Uri("Paintings?$select=Fully.Qualified.Namespace.Restore", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/Paintings?$select=Fully.Qualified.Namespace.Restore"), actualUri);
        }

        [TestMethod]
        public void CanSelectSubPropertyOfComplexType()
        {
            Uri queryUri = new Uri("People?$select=MyAddress/City", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("MyAddress/City"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectManyDeclaredPropertiesSucceeds()
        {
            // Mixed spaces and a tab between items — all trimmed on rebuild.
            Uri queryUri = new Uri("People?$select= Shoe, Birthdate,GeographyPoint, TimeEmployed, \tPreviousAddresses", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Shoe,Birthdate,GeographyPoint,TimeEmployed,PreviousAddresses"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectOpenPropertySucceeds()
        {
            Uri queryUri = new Uri("Paintings?$select=SomeOpenProperty", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/Paintings?$select=SomeOpenProperty"), actualUri);
        }

        [TestMethod]
        public void SelectMixedOpenAndDeclaredPropertiesSucceeds()
        {
            Uri queryUri = new Uri("Paintings?$select=Artist, SomeOpenProperty", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/Paintings?$select=" + Uri.EscapeDataString("Artist,SomeOpenProperty"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectingNamespaceQualifiedWildcardsShouldWork()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.*", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=Fully.Qualified.Namespace.*"), actualUri);
        }

        [TestMethod]
        public void ShouldIgnoreCommaAtEndofSelect()
        {
            Uri queryUri = new Uri("People?$select=MyDog,", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=MyDog"), actualUri);
        }
        #endregion $select with no $expand

        #region $expand with no $select
        [TestMethod]
        public void ExpandWithoutSelectShouldDefaultToAllSelections()
        {
            Uri queryUri = new Uri("People?$expand=MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$expand=MyDog"), actualUri);
        }

        [TestMethod]
        public void LastEmbeddedQueryOptionDoesNotRequireSemiColon()
        {
            Uri queryUri = new Uri("People?$expand=MyDog($expand=MyPeople)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog($expand=MyPeople)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void BasicNestedExpansionsShouldWork()
        {
            Uri queryUri = new Uri("People?$expand=MyDog($expand=MyPeople($expand=MyPaintings))", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog($expand=MyPeople($expand=MyPaintings))"), actualUri.OriginalString);
        }

        [TestMethod]
        public void MultipleExpansionsShouldWork()
        {
            Uri queryUri = new Uri("People?$expand=MyDog, MyPaintings, MyFavoritePainting", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog,MyPaintings,MyFavoritePainting"), actualUri.OriginalString);
        }

        [TestMethod]
        public void MultipleExpandsOnTheSamePropertyAreCollapsed()
        {
            // Two expands of MyDog merge into a single expand item.
            Uri queryUri = new Uri("People?$expand=MyDog, MyDog($expand=MyPeople)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog($expand=MyPeople)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void ExpandNavigationPropertyOnDerivedType()
        {
            Uri queryUri = new Uri("People?$expand=Fully.Qualified.Namespace.Manager/PaintingsInOffice", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/PaintingsInOffice"), actualUri.OriginalString);
        }

        [TestMethod]
        public void DeepExpandShouldBeMerged()
        {
            // The shallower duplicate tree is absorbed by the deeper one.
            Uri queryUri = new Uri("People?$expand=MyDog($expand=MyPeople($expand=MyDog($expand=MyPeople($expand=MyPaintings)))), MyDog($expand=MyPeople($expand=MyDog($expand=MyPeople)))", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog($expand=MyPeople($expand=MyDog($expand=MyPeople($expand=MyPaintings))))"), actualUri.OriginalString);
        }

        [TestMethod]
        public void ExpandWithEnumSelect()
        {
            Uri queryUri = new Uri("Dogs?$expand=MyPeople($expand=MyPet2Set($select=PetColorPattern,Color))", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/Dogs?$expand=" + Uri.EscapeDataString("MyPeople($expand=MyPet2Set($select=PetColorPattern,Color))"), actualUri.OriginalString);
        }

        [TestMethod]
        public void ParseEnumPropertyOrderByWithinExpand()
        {
            Uri queryUri = new Uri("People?$expand=MyPet2Set($orderby=PetColorPattern desc)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyPet2Set($orderby=PetColorPattern desc)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void RepeatedExpandWithTypeSegmentsShouldBeMerged()
        {
            Uri queryUri = new Uri("People?$expand=Fully.Qualified.Namespace.Manager/DirectReports, Fully.Qualified.Namespace.Manager/DirectReports", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/DirectReports"), actualUri.OriginalString);
        }

        [TestMethod]
        public void DeepExpandWithDifferentTypeSegmentsShouldNotBeMerged()
        {
            Uri queryUri = new Uri("People?$expand=Fully.Qualified.Namespace.Manager/DirectReports, Fully.Qualified.Namespace.Employee/Manager", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/DirectReports,Fully.Qualified.Namespace.Employee/Manager"), actualUri.OriginalString);
        }

        [TestMethod]
        public void ShouldIgnoreCommaAtEndofExpand()
        {
            Uri queryUri = new Uri("People?$expand=MyDog,", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$expand=MyDog"), actualUri);
        }

        [TestMethod]
        public void ExpandWithInnerQueryOptions()
        {
            // Note the builder normalizes inner option order: $orderby is
            // emitted before $levels even though the input had them reversed.
            Uri queryUri = new Uri("People?$expand=Fully.Qualified.Namespace.Manager/DirectReports($levels=max;$orderby=ID desc),Fully.Qualified.Namespace.Employee/Manager($levels=3)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(
                "http://gobbledygook/People?$expand=" +
                Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/DirectReports($orderby=ID desc;$levels=max),Fully.Qualified.Namespace.Employee/Manager($levels=3)"),
                actualUri.OriginalString);
        }
        #endregion

        #region Interesting $expand with other options scenarios
        [TestMethod]
        public void NestedSelectPropertyWithJustNavPropAtParentLevelMeansJustOnePropertyAtInnerLevel()
        {
            Uri queryUri = new Uri("Dogs?$select=MyPeople&$expand=MyPeople($select=Name)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/Dogs?$select=" + Uri.EscapeDataString("MyPeople") + "&$expand=" + Uri.EscapeDataString("MyPeople($select=Name)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void NestedSelectPropertyWithNothingSelectedAtParentLevelMeansAllAtTopLevelAndJustOnePropertyAtInnerLevel()
        {
            Uri queryUri = new Uri("Dogs?$expand=MyPeople($select=Name)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/Dogs?$expand=" + Uri.EscapeDataString("MyPeople($select=Name)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void ExpandsDoNotHaveToAppearInSelectToBeSelected()
        {
            // Expanded nav props are appended to the rebuilt $select list.
            Uri queryUri = new Uri("People?$select=MyAddress&$expand=MyDog, MyFavoritePainting", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("MyAddress,MyDog,MyFavoritePainting") + "&$expand=" + Uri.EscapeDataString("MyDog,MyFavoritePainting"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SomeExpandedNavPropsCanAppearInSelectAndAreRetainedAsNavPropLinks()
        {
            Uri queryUri = new Uri("People?$select=MyAddress, MyDog&$expand=MyDog, MyFavoritePainting", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("MyAddress,MyDog,MyFavoritePainting") + "&$expand=" + Uri.EscapeDataString("MyDog,MyFavoritePainting"), actualUri.OriginalString);
        }

        [TestMethod]
        public void MultipleDeepLevelExpansionsAndSelectionsShouldWork()
        {
            Uri queryUri = new Uri("People?$select=MyDog, MyFavoritePainting&$expand=MyDog($expand=MyPeople($select=Name)), MyFavoritePainting($select=Artist)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("MyDog,MyFavoritePainting") + "&$expand=" + Uri.EscapeDataString("MyDog($expand=MyPeople($select=Name)),MyFavoritePainting($select=Artist)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SimpleExpandAndOnlySelectIt()
        {
            Uri queryUri = new Uri("People?$select=MyDog&$expand=MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=MyDog&$expand=MyDog"), actualUri);
        }

        [TestMethod]
        public void ExpandSupportsTypeSegments()
        {
            Uri queryUri = new Uri("People?$expand=Fully.Qualified.Namespace.Employee/PaintingsInOffice", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/PaintingsInOffice"), actualUri.OriginalString);
        }

        [TestMethod]
        public void UnneededTypeSegmentOnSelectButNotExpandIsIgnored()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Employee/MyDog&$expand=MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/MyDog,MyDog") + "&$expand=MyDog", actualUri.OriginalString);
        }

        [TestMethod]
        public void UnneededTypeOnExpandButNotSelectIsKept()
        {
            Uri queryUri = new Uri("People?$select=MyDog&$expand=Fully.Qualified.Namespace.Employee/MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("MyDog,Fully.Qualified.Namespace.Employee/MyDog") + "&$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/MyDog"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectAndExpandWithDifferentTypesWorks()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Employee/MyDog&$expand=Fully.Qualified.Namespace.Employee/MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/MyDog") + "&$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/MyDog"), actualUri.OriginalString);
        }

        [TestMethod]
        public void ExpandSamePropertyOnTwoDifferentTypesWithoutASelectExpandsNavPropOnBothTypes()
        {
            Uri queryUri = new Uri("People?$expand=Fully.Qualified.Namespace.Employee/MyDog, Fully.Qualified.Namespace.Manager/MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Employee/MyDog,Fully.Qualified.Namespace.Manager/MyDog"), actualUri.OriginalString);
        }

        [TestMethod]
        public void WildCardOnExpandedNavigationPropertyAfterTypeSegment()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Manager/MyPaintings&$expand=Fully.Qualified.Namespace.Manager/MyPaintings($select=*)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/MyPaintings") + "&$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/MyPaintings($select=*)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void WildCardOnExpandedNavigationPropertyOnDerivedType()
        {
            Uri queryUri = new Uri("People?$select=Fully.Qualified.Namespace.Manager/PaintingsInOffice&$expand=Fully.Qualified.Namespace.Manager/PaintingsInOffice($select=*)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/PaintingsInOffice") + "&$expand=" + Uri.EscapeDataString("Fully.Qualified.Namespace.Manager/PaintingsInOffice($select=*)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void MixOfSelectionTypesShouldWork()
        {
            Uri queryUri = new Uri("People?$select=Name,Birthdate,MyAddress,Fully.Qualified.Namespace.*,MyLions&$expand=MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Name,Birthdate,MyAddress,Fully.Qualified.Namespace.*,MyLions,MyDog") + "&$expand=" + Uri.EscapeDataString("MyDog"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectingANavPropIsNotRecursiveAllSelection()
        {
            Uri queryUri = new Uri("People?$select=MyDog&$expand=MyDog($expand=MyPeople($select=*))", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("MyDog") + "&$expand=" + Uri.EscapeDataString("MyDog($expand=MyPeople($select=*))"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectOnComplexTypeWorks()
        {
            Uri queryUri = new Uri("Paintings?$select=City&$expand=", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/Paintings?$select=" + Uri.EscapeDataString("City"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectOnEnumTypeWorks()
        {
            Uri queryUri = new Uri("Pet2Set?$select=PetColorPattern", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/Pet2Set?$select=" + Uri.EscapeDataString("PetColorPattern"), actualUri.OriginalString);
        }

        [TestMethod]
        public void MultipleSelectsOnTheSameExpandItem()
        {
            Uri queryUri = new Uri("People?$expand=MyDog($select=Color,Breed)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog($select=Color,Breed)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void RedundantExpandsWithUniqueSelectsArePropertyCollapsed()
        {
            // The merged nested $select comes back in a canonical order
            // (Breed,Color), not the order the items were supplied in.
            Uri queryUri = new Uri("People?$expand=MyDog($select=Color), MyDog($select=Breed)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog($select=Breed,Color)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void TypeSegmentsWorkOnSubExpands()
        {
            Uri queryUri = new Uri("Dogs?$expand=MyPeople($select=Fully.Qualified.Namespace.Employee/Name)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/Dogs?$expand=" + Uri.EscapeDataString("MyPeople($select=Fully.Qualified.Namespace.Employee/Name)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void ExplicitNavPropIsAddedIfNeededAtDeeperLevels()
        {
            // MyPeople is auto-added to the inner $select because it is expanded.
            Uri queryUri = new Uri("People?$expand=MyDog($select=Color;$expand=MyPeople)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog($select=Color,MyPeople;$expand=MyPeople)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectAndExpandShouldWorkOnSelectComplexProperties()
        {
            Uri queryUri = new Uri("People?$select=Name,MyAddress/City,MyDog&$expand=MyDog($select=Color)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Name,MyAddress/City,MyDog") + "&$expand=" + Uri.EscapeDataString("MyDog($select=Color)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectAndExpandShouldWorkOnSelectComplexPropertiesWithTypeCast()
        {
            Uri queryUri = new Uri("People?$select=Name,MyAddress/Fully.Qualified.Namespace.HomeAddress/HomeNO,MyDog&$expand=MyDog($select=Color)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Name,MyAddress/Fully.Qualified.Namespace.HomeAddress/HomeNO,MyDog") + "&$expand=" + Uri.EscapeDataString("MyDog($select=Color)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectAndExpandShouldWorkOnSelectComplexPropertiesWithMultipleTypeCasts()
        {
            Uri queryUri = new Uri("People?$select=Name,MyAddress/Fully.Qualified.Namespace.HomeAddress/NextHome/Fully.Qualified.Namespace.HomeAddress/HomeNO,MyDog&$expand=MyDog($select=Color)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Name,MyAddress/Fully.Qualified.Namespace.HomeAddress/NextHome/Fully.Qualified.Namespace.HomeAddress/HomeNO,MyDog") + "&$expand=" + Uri.EscapeDataString("MyDog($select=Color)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectAndExpandShouldWorkOnSelectComplexPropertiesRecursively()
        {
            Uri queryUri = new Uri("People?$select=Name,MyAddress/NextHome/NextHome/City,MyDog&$expand=MyDog($select=Color)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Name,MyAddress/NextHome/NextHome/City,MyDog") + "&$expand=" + Uri.EscapeDataString("MyDog($select=Color)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void SelectAndExpandShouldWorkOnSelectOpenProperty()
        {
            Uri queryUri = new Uri("People?$select=Name,MyOpenAddress/Test,MyDog&$expand=MyDog($select=Color)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Name,MyOpenAddress/Test,MyDog") + "&$expand=" + Uri.EscapeDataString("MyDog($select=Color)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void TranslateSelectExpandClauseForExpandItemShouldWork()
        {
            // Exercises SelectExpandClauseToStringBuilder directly (no URI
            // round-trip): parse, then re-stringify, expecting the input back.
            string expandClause = "MyDog($filter=Color eq 'Brown';$orderby=Color;$expand=MyPeople/$ref)";
            var topLeveItem = new ODataQueryOptionParser(HardCodedTestModel.TestModel, HardCodedTestModel.GetPersonType(), HardCodedTestModel.GetPeopleSet(), new Dictionary<string, string> { { "$expand", expandClause }, { "$select","" } }).ParseSelectAndExpand();
            SelectExpandClauseToStringBuilder translater = new SelectExpandClauseToStringBuilder();
            string result = translater.TranslateSelectExpandClause(topLeveItem, false);
            Assert.AreEqual("$expand=" + expandClause, result);
        }

        [TestMethod]
        public void TranslateSelectExpandClauseWithoutExpandRefOptionShouldWork()
        {
            string expandClause = "MyDog($expand=MyPeople/$ref)";
            var topLeveItem = new ODataQueryOptionParser(HardCodedTestModel.TestModel, HardCodedTestModel.GetPersonType(), HardCodedTestModel.GetPeopleSet(), new Dictionary<string, string> { { "$expand", expandClause }, { "$select", "" } }).ParseSelectAndExpand();
            SelectExpandClauseToStringBuilder translater = new SelectExpandClauseToStringBuilder();
            string result = translater.TranslateSelectExpandClause(topLeveItem, false);
            Assert.AreEqual("$expand=" + expandClause, result);
        }
        #endregion

        #region mixed examples
        [TestMethod]
        public void SelectAllWithoutExpandShouldWork()
        {
            Uri queryUri = new Uri("People?$select=*", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=*"), actualUri);
        }

        [TestMethod]
        public void EmptySelectAndWithoutExpandShouldIgnored()
        {
            // Whitespace-only $select value is treated as empty and dropped.
            Uri queryUri = new Uri("People?$select = ", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People"), actualUri);
        }

        [TestMethod]
        public void SelectAllWithEmptyExpandShouldWork()
        {
            Uri queryUri = new Uri("People?$select = *&$expand = ", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=*"), actualUri);
        }

        [TestMethod]
        public void EmptySelectAndEmptyExpandShouldWork()
        {
            Uri queryUri = new Uri("People?$select = &$expand=", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People"), actualUri);
        }

        [TestMethod]
        public void SelectWithEmptyExpandShouldWork()
        {
            Uri queryUri = new Uri("People?$select = FirstName&$expand=", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$select=FirstName"), actualUri);
        }

        [TestMethod]
        public void EmptySelectWithExpandShouldWork()
        {
            Uri queryUri = new Uri("People?$select = &$expand=MyDog", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual(new Uri("http://gobbledygook/People?$expand=MyDog"), actualUri);
        }

        [TestMethod]
        public void SelectWithNestedExpandShouldWork()
        {
            Uri queryUri = new Uri("People?$select=Name,MyOpenAddress/Test&$expand=MyDog($filter=Color eq 'Brown';$orderby=Color; $search=Color)", UriKind.Relative);
            Uri actualUri = UriBuilder(queryUri, ODataUrlConventions.Default, settings);
            Assert.AreEqual("http://gobbledygook/People?$select=" + Uri.EscapeDataString("Name,MyOpenAddress/Test,MyDog") + "&$expand=" + Uri.EscapeDataString("MyDog($filter=Color eq 'Brown';$orderby=Color;$search=Color)"), actualUri.OriginalString);
        }

        [TestMethod]
        public void ExpandWithNestedQueryOptionsShouldWork()
        {
            // Builds the semantic tree by hand (no parsing) and verifies the
            // builder serializes every nested option: filter, orderby, top,
            // count and search.
            var ervFilter = new EntityRangeVariable(ExpressionConstants.It, HardCodedTestModel.GetDogTypeReference(), HardCodedTestModel.GetDogsSet());
            var ervOrderby = new EntityRangeVariable(ExpressionConstants.It, HardCodedTestModel.GetDogTypeReference(), HardCodedTestModel.GetDogsSet());
            var expand =
                new ExpandedNavigationSelectItem(
                    new ODataExpandPath(new NavigationPropertySegment(HardCodedTestModel.GetPersonMyDogNavProp(), null)),
                    HardCodedTestModel.GetPeopleSet(),
                    null,
                    new FilterClause(
                        new BinaryOperatorNode(
                            BinaryOperatorKind.Equal,
                            new SingleValuePropertyAccessNode(new EntityRangeVariableReferenceNode("$it", ervFilter), HardCodedTestModel.GetDogColorProp()),
                            new ConstantNode("Brown", "'Brown'")),
                        ervFilter),
                    new OrderByClause(
                        null,
                        new SingleValuePropertyAccessNode(new EntityRangeVariableReferenceNode("$it", ervOrderby), HardCodedTestModel.GetDogColorProp()),
                        OrderByDirection.Ascending,
                        ervOrderby),
                    1,
                    /* skipOption */ null,
                    true,
                    new SearchClause(new SearchTermNode("termX")),
                    /* levelsOption*/ null);

            ODataUri uri = new ODataUri()
            {
                ServiceRoot = new Uri("http://gobbledygook/"),
                Path = new ODataPath(new EntitySetSegment(HardCodedTestModel.GetPeopleSet())),
                SelectAndExpand = new SelectExpandClause(new[] { expand }, true)
            };
            Uri actualUri = new ODataUriBuilder(ODataUrlConventions.Default, uri).BuildUri();
            Assert.AreEqual("http://gobbledygook/People?$expand=" + Uri.EscapeDataString("MyDog($filter=Color eq 'Brown';$orderby=Color;$top=1;$count=true;$search=termX)"), actualUri.OriginalString);
        }
        #endregion

        /// <summary>
        /// Shared round-trip helper: parses <paramref name="queryUri"/> (relative
        /// to <see cref="ServiceRoot"/>) against the hard-coded test model, then
        /// rebuilds and returns an absolute URI via ODataUriBuilder.
        /// </summary>
        public static Uri UriBuilder(Uri queryUri, ODataUrlConventions urlConventions, ODataUriParserSettings settings)
        {
            ODataUriParser odataUriParser = new ODataUriParser(HardCodedTestModel.TestModel, ServiceRoot, queryUri);
            odataUriParser.UrlConventions = urlConventions;
            ODataUri odataUri = odataUriParser.ParseUri();
            ODataUriBuilder odataUriBuilder = new ODataUriBuilder(urlConventions, odataUri);
            return odataUriBuilder.BuildUri();
        }
    }
}
pysco68/odata.net
test/FunctionalTests/Tests/DataOData/Tests/OData.Query.TDD.Tests/UriBuilder/SelectExpandBuilderTests.cs
C#
mit
40,177
// Jake build script for EJS: lint -> browserify -> minify -> package/publish.
var fs = require('fs');
var execSync = require('child_process').execSync;
// Run a shell command, streaming its output straight to this process's stdio.
var exec = function (cmd) {
  execSync(cmd, {stdio: 'inherit'});
};

/* global jake, task, desc, publishTask */

// Full build: lint first, clean old artifacts, then bundle and minify.
task('build', ['lint', 'clean', 'browserify', 'minify'], function () {
  console.log('Build completed.');
});

desc('Cleans browerified/minified files and package files');
task('clean', ['clobber'], function () {
  // 'clobber' (provided by publishTask) removes package files; then remove
  // the generated browser bundles.
  jake.rmRf('./ejs.js');
  jake.rmRf('./ejs.min.js');
  console.log('Cleaned up compiled files.');
});

desc('Lints the source code');
task('lint', ['clean'], function () {
  exec('./node_modules/.bin/eslint "**/*.js"');
  console.log('Linting completed.');
});

// Bundle lib/ejs.js into a standalone UMD build exposed as global `ejs`.
task('browserify', function () {
  exec('./node_modules/browserify/bin/cmd.js --standalone ejs lib/ejs.js > ejs.js');
  console.log('Browserification completed.');
});

// Produce the minified distribution from the browserified bundle.
task('minify', function () {
  exec('./node_modules/uglify-js/bin/uglifyjs ejs.js > ejs.min.js');
  console.log('Minification completed.');
});

desc('Generates the EJS API docs');
task('doc', function (dev) {
  // Pass a truthy arg (jake doc[1]) to include private members via jsdoc -p.
  jake.rmRf('out');
  var p = dev ? '-p' : '';
  exec('./node_modules/.bin/jsdoc ' + p + ' -c jsdoc.json lib/* docs/jsdoc/*');
  console.log('Documentation generated.');
});

desc('Publishes the EJS API docs');
task('docPublish', ['doc'], function () {
  // CNAME file makes GitHub Pages serve the docs at api.ejs.co.
  fs.writeFileSync('out/CNAME', 'api.ejs.co');
  console.log('Pushing docs to gh-pages...');
  exec('./node_modules/.bin/git-directory-deploy --directory out/');
  console.log('Docs published to gh-pages.');
});

desc('Runs the EJS test suite');
task('test', ['lint'], function () {
  exec('./node_modules/.bin/mocha');
});

// Defines publish/package/clobber tasks; lists the files shipped in the
// npm tarball.
publishTask('ejs', ['build'], function () {
  this.packageFiles.include([
    'jakefile.js',
    'README.md',
    'LICENSE',
    'package.json',
    'postinstall.js',
    'ejs.js',
    'ejs.min.js',
    'lib/**'
  ]);
});

// After a successful publish, refresh the hosted API docs as well.
jake.Task.publish.on('complete', function () {
  console.log('Updating hosted docs...');
  console.log('If this fails, run jake docPublish to re-try.');
  jake.Task.docPublish.invoke();
});
ealbertos/dotfiles
vscode.symlink/extensions/ms-mssql.mssql-1.11.1/node_modules/ejs/jakefile.js
JavaScript
mit
2,033
//
//  SQRLUpdater.h
//  Squirrel
//
//  Created by Justin Spahr-Summers on 2013-07-21.
//  Copyright (c) 2013 GitHub. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <ReactiveCocoa/ReactiveCocoa.h>

// Represents the current state of the updater.
//
// SQRLUpdaterStateIdle              - Doing absolutely diddly squat.
// SQRLUpdaterStateCheckingForUpdate - Checking for any updates from the server.
// SQRLUpdaterStateDownloadingUpdate - Update found, downloading the archive.
// SQRLUpdaterStateAwaitingRelaunch  - Awaiting a relaunch to install
//                                     the update.
typedef enum : NSUInteger {
	SQRLUpdaterStateIdle,
	SQRLUpdaterStateCheckingForUpdate,
	SQRLUpdaterStateDownloadingUpdate,
	SQRLUpdaterStateAwaitingRelaunch,
} SQRLUpdaterState;

// Block for providing download requests given a download URL. Lets callers
// attach credentials or other headers to the archive download.
typedef NSURLRequest * (^SQRLRequestForDownload)(NSURL *);

// The domain for errors originating within SQRLUpdater.
extern NSString * const SQRLUpdaterErrorDomain;

// The downloaded update does not contain an app bundle, or it was deleted on
// disk before we could get to it.
extern const NSInteger SQRLUpdaterErrorMissingUpdateBundle;

// An error occurred in the out-of-process updater while it was setting up.
extern const NSInteger SQRLUpdaterErrorPreparingUpdateJob;

// The code signing requirement for the running application could not be
// retrieved.
extern const NSInteger SQRLUpdaterErrorRetrievingCodeSigningRequirement;

// The server sent a response that we didn't understand.
//
// Includes `SQRLUpdaterServerDataErrorKey` in the error's `userInfo`.
extern const NSInteger SQRLUpdaterErrorInvalidServerResponse;

// The server sent a response body that we didn't understand.
//
// Includes `SQRLUpdaterServerDataErrorKey` in the error's `userInfo`.
extern const NSInteger SQRLUpdaterErrorInvalidServerBody;

// The server sent update JSON that we didn't understand.
//
// Includes `SQRLUpdaterJSONObjectErrorKey` in the error's `userInfo`.
extern const NSInteger SQRLUpdaterErrorInvalidJSON;

// Associated with the `NSData` received from the server when an error with code
// `SQRLUpdaterErrorInvalidServerResponse` is generated.
extern NSString * const SQRLUpdaterServerDataErrorKey;

// Associated with the JSON object that was received from the server when an
// error with code `SQRLUpdaterErrorInvalidJSON` is generated.
extern NSString * const SQRLUpdaterJSONObjectErrorKey;

@class RACCommand;
@class RACDisposable;
@class RACSignal;

/// Type of mode used to download the release: either a static JSON release
/// file on a CDN (JSONFILE) or a dedicated release server (RELEASESERVER).
typedef enum {
	JSONFILE=1,
	RELEASESERVER
} SQRLUpdaterMode;

// Checks for, downloads, and installs updates.
@interface SQRLUpdater : NSObject

// Kicks off a check for updates.
//
// If an update is available, it will be sent on `updates` once downloaded.
@property (nonatomic, strong, readonly) RACCommand *checkForUpdatesCommand;

// The current state of the manager.
//
// This property is KVO-compliant.
@property (atomic, readonly) SQRLUpdaterState state;

// Sends an `SQRLDownloadedUpdate` object on the main thread whenever a new
// update is available.
//
// This signal is actually just `checkForUpdatesCommand.executionSignals`,
// flattened for convenience.
@property (nonatomic, strong, readonly) RACSignal *updates;

// The request that will be sent to check for updates.
//
// The default value is the argument that was originally passed to
// -initWithUpdateRequest:.
//
// This property must never be set to nil.
@property (atomic, copy) NSURLRequest *updateRequest;

// The block used for fetching a given download request.
//
// The default value is the argument that was originally passed to
// -initWithUpdateRequest:requestForDownload:.
//
// If initialized with -initWithUpdateRequest: this block will
// return a generic NSURLRequest with the provided URL.
@property (nonatomic, copy) SQRLRequestForDownload requestForDownload;

// The `SQRLUpdate` subclass to instantiate with the server's response.
//
// By default, this is `SQRLUpdate` itself, but it can be set to a custom
// subclass in order to preserve additional JSON data. See the `SQRLUpdate`
// documentation for more information.
@property (atomic, strong) Class updateClass;

// Initializes an updater that will send the given request to check for updates.
//
// This is the designated initializer for this class.
//
// updateRequest - A request to send to check for updates. This request can be
//                 customized as desired, like by including an `Authorization`
//                 header to authenticate with a private update server, or
//                 pointing to a local URL for testing. This must not be nil.
//
// Returns the initialized `SQRLUpdater`.
- (id)initWithUpdateRequest:(NSURLRequest *)updateRequest;

// Initializes an updater that will send the given request to check for updates
// on a CDN reading a release file in JSON format.
//
// updateRequest - A request to send to check for updates. This request can be
//                 customized as desired, like by including an `Authorization`
//                 header to authenticate with a private update server, or
//                 pointing to a local URL for testing. This must not be nil.
// version       - the currently installed version
//
// Returns the initialized `SQRLUpdater`.
- (id)initWithUpdateRequest:(NSURLRequest *)updateRequest forVersion:(NSString*)version;

// Initializes an updater that will send the given request to check for updates
// and passes a block to provide requests for the update downloads.
//
// updateRequest      - Same as with initWithUpdateRequest
// requestForDownload - Once the update URL is found for the update download,
//                      allows providing custom requests that can be customized
//                      as desired. Useful for including `Authorization`
//                      headers just like the updateRequest param.
//
// Returns the initialized `SQRLUpdater`.
- (id)initWithUpdateRequest:(NSURLRequest *)updateRequest requestForDownload:(SQRLRequestForDownload)requestForDownload;

// Initializes an updater that will send the given request to check for updates
// and passes a block to provide requests for the update downloads.
//
// updateRequest      - Same as with initWithUpdateRequest
// requestForDownload - Once the update URL is found for the update download,
//                      allows providing custom requests that can be customized
//                      as desired. Useful for including `Authorization`
//                      headers just like the updateRequest param.
// version            - currently running version
// mode               - either RELEASESERVER or JSONFILE
//
// Returns the initialized `SQRLUpdater`.
- (id)initWithUpdateRequest:(NSURLRequest *)updateRequest requestForDownload:(SQRLRequestForDownload)requestForDownload forVersion:(NSString*) version useMode:(SQRLUpdaterMode) mode;

// Executes `checkForUpdatesCommand` (if enabled) every `interval` seconds.
//
// The first check will not occur until `interval` seconds have passed.
//
// interval - The interval, in seconds, between each check.
//
// Returns a disposable which can be used to cancel the automatic update
// checking.
- (RACDisposable *)startAutomaticChecksWithInterval:(NSTimeInterval)interval;

// Terminates the running application to install any available update, then
// automatically relaunches the app after updating.
//
// This method is only useful if you want the application to automatically
// relaunch. Otherwise, you can simply use `-[NSApplication terminate:]` or any
// other exit mechanism.
//
// After invoking this method, the receiver is responsible for terminating the
// application upon success. The app must not be terminated in any other way
// unless an error occurs.
//
// Returns a signal that will error on the main scheduler if anything goes
// wrong before termination. The signal will never complete.
- (RACSignal *)relaunchToInstallUpdate;

// Whether the app is running from a read-only volume (e.g. a mounted DMG),
// in which case in-place updating is not possible.
- (BOOL)isRunningOnReadOnlyVolume;

// Parses the given JSON release data and produces an update signal from it.
- (RACSignal *)updateFromJSONData:(NSData *)data;

@end

@interface SQRLUpdater (Unavailable)

- (id)init __attribute__((unavailable("Use -initWithUpdateRequest: instead")));

@end
ThomasBaumer/inofficial-WhatsApp-client
releases/WhatsApp Desktop-darwin-x64/WhatsApp Desktop.app/Contents/Frameworks/Squirrel.framework/Versions/A/Headers/SQRLUpdater.h
C
mit
8,236
// AMD re-export shim generated by jspm: resolves the versioned
// aurelia-loader-default bundle and passes its export through unchanged.
define(
  ["npm:aurelia-loader-default@1.0.0-beta.1.0.1/aurelia-loader-default"],
  function (loaderModule) {
    return loaderModule;
  }
);
Mteuahasan/ror-microblog
public/jspm_packages/npm/aurelia-loader-default@1.0.0-beta.1.0.1.js
JavaScript
mit
114
--- layout: post title: "Visualize and Search Your Auth0 Logs Using Sumo Logic" description: The Auth0 Logs to Sumo Logic extension helps you deal with security incidents and performance issues date: 2016-06-06 8:30 alias: /2016/06/06/visualize-your-auth0-logs-using-sumo-logic/ category: Auth0-based Tutorial, Tools author: name: Eugene Kogan url: https://twitter.com/eugk mail: eugene.kogan@auth0.com avatar: https://s.gravatar.com/avatar/667b1c82b6cc2241ff176d50c65da603?s=200 design: bg_color: "#16214D" image: https://cdn.auth0.com/blog/sumo-logic/sumo-logic-logo.jpg tags: - visualization - extensions - security - sumo-logic - logging - logs related: - 2016-04-07-integrate-auth0-into-your-existing-saas-tools - 2016-03-10-introducing-auth0-collections-for-postman --- If you're a user of both Auth0 and Sumo Logic, you can use this awesome [Auth0 extension](https://github.com/auth0/auth0-logs-to-sumologic) to send your tenant logs to Sumo Logic. Having all your logs centrally located makes them more valuable and easier to correlate when dealing with something like a security incident or a performance issue. Amazingly, you can sign up for a free [Sumo Logic](https://www.sumologic.com/pricing/) account and collect up to 500MB of data per day! That is more than enough to get started. {% include tweet_quote.html quote_text="Having all your logs centrally located makes them more valuable and easier to correlate." %} It's super easy to install the "Auth0 Logs to Sumo Logic" extension right from your Auth0 account [Dashboard](https://manage.auth0.com/#/extensions). Simply login, click on Extensions, then find and click on the Sumo Logic icon to configure and enable the extension. ![Sumo Logic extension icon](https://cdn.auth0.com/blog/sumo-logic/sl-ext-icon.png) Once enabled, the extension configuration screen will be displayed. You'll need to decide on a few simple settings, but the defaults are all reasonable. 
One piece of information you will need to supply is the URL of your Sumo Logic HTTP collector endpoint. If you don't already have one, follow the [Sumo Logic instructions](https://help.sumologic.com/Send_Data/Sources/HTTP_Source) for creating an HTTP source and paste the URL it generates into the Auth0 extension configuration settings. We recommend naming the source category `auth0_logs`. ![Sumo Logic extension configuration screen](https://cdn.auth0.com/blog/sumo-logic/sl-ext-config.png) Data should begin appearing in Sumo Logic a few minutes after you enable the extension. A simple search like `_sourceCategory=auth0_logs` will show you the most recent log events. Getting the top 10 users for a given time period is as easy as this query: ``` _sourceCategory=auth0_logs | json auto | count user_name | top 10 user_name by _count ``` Want to create a chart showing the popularity of a particular client based on the number of logins per hour over a few days? Sure, you can do that in Sumo Logic with just a few commands: ``` _sourceCategory=auth0_logs salesforce | json auto | timeslice 1h | count by _timeslice, client_name | transpose row _timeslice column client_name ``` The resulting chart will look something like this: ![Sumo Logic trend chart sample](https://cdn.auth0.com/blog/sumo-logic/sl-chart-sample.png) We have been using the Auth0 to Sumo Logic extension ourselves since it was first released, and it's proven to be very useful for staying on top of what's happening with our own Auth0 accounts and our internal users (employees). Sumo Logic makes it easy to see the latest failed logins, find and alert on error messages, create charts to visualize trends, or even do complex statistical analysis on your data. To help us (and our customers) visualize these logs, we spent some time creating a couple of dashboards. 
The Sumo Logic for Auth0 dashboards show you the output of several saved searches all on one easy-to-read screen, and make it easy to zoom in or drill down when something looks interesting.

<a target="_blank" href="https://cdn.auth0.com/blog/sumo-logic/sl-db-screenshot.jpg">![Sumo Logic dashboard for Auth0 logs](https://cdn.auth0.com/blog/sumo-logic/sl-db-screenshot.jpg)</a><!-- __ -->

If you're a Sumo Logic customer and are interested in trying out these dashboards, just let us know via [Support Center](https://support.auth0.com) (be sure to include your Sumo Logic account name) and we will gladly share them with you. Once they're available through your account, you're free to customize them, add to them, create alerts based on the searches, or do anything else that you find useful!

Here are the saved searches we've created so far:

![Sumo Logic saved searches for Auth0 logs](https://cdn.auth0.com/blog/sumo-logic/sl-saved-searches.jpg)

We'd love to hear what you think, especially if you've got a great idea that we should incorporate back into our original version. Our goal is to eventually publish these dashboards via a real Sumo Logic app, so that they're automatically available to all users. Have fun analyzing and visualizing those logs!
guillermoga88/blog
_posts/2016-06-06-visualize-your-auth0-logs-using-sumo-logic.markdown
Markdown
mit
5,061