text
stringlengths
3
1.05M
using System;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Orleans;
using Orleans.Runtime;
using UnitTests.TestHelper;
using Xunit;
using Xunit.Abstractions;

namespace UnitTests.AsyncPrimitivesTests
{
    /// <summary>
    /// Tests for <see cref="AsyncPipeline"/>: capacity throttling, Wait() semantics,
    /// and overall throughput consistency under concurrent producers.
    /// </summary>
    public class AsyncPipelineTests
    {
        private readonly ITestOutputHelper output;

        // Dedicated gate for the shared counters / Random below.
        // Locking 'this' is an anti-pattern: any external code holding a reference
        // to the test instance could take the same lock and deadlock us.
        private readonly object lockable = new object();

        private const int _iterationCount = 100;
        private const int _defaultPipelineCapacity = 2;

        public AsyncPipelineTests(ITestOutputHelper output)
        {
            this.output = output;
        }

        [Fact, TestCategory("Functional"), TestCategory("AsynchronyPrimitives")]
        public void AsyncPipelineSimpleTest()
        {
            int step = 1000;
            // Three tasks completing at roughly t = step, 2*step and 3*step.
            var done = TimedCompletions(step, step, step);
            var pipeline = new AsyncPipeline(2);
            Stopwatch watch = new Stopwatch();
            watch.Start();

            // The first two Adds fit within capacity 2 and must not block.
            pipeline.Add(done[0]);
            const int epsilon = 100;
            var elapsed0 = watch.ElapsedMilliseconds;
            Assert.True(elapsed0 < epsilon, elapsed0.ToString());
            pipeline.Add(done[2]);
            var elapsed1 = watch.ElapsedMilliseconds;
            Assert.True(elapsed1 < epsilon, elapsed1.ToString());

            // The third Add must block until the first task completes (~step ms).
            pipeline.Add(done[1]);
            var elapsed2 = watch.ElapsedMilliseconds;
            Assert.True(step - epsilon <= elapsed2 && elapsed2 <= step + epsilon);

            // Wait() returns only once the slowest task is done (~3*step ms total).
            pipeline.Wait();
            watch.Stop();
            Assert.True(3 * step - epsilon <= watch.ElapsedMilliseconds && watch.ElapsedMilliseconds <= 3 * step + epsilon);
        }

        [Fact, TestCategory("Functional"), TestCategory("AsynchronyPrimitives")]
        public void AsyncPipelineWaitTest()
        {
            Random rand = new Random(222);
            int started = 0;
            int finished1 = 0;
            int finished2 = 0;
            int numActions = 1000;
            Action action1 = (() =>
            {
                int delay;
                // System.Random is not thread-safe; read it under the same lock
                // that protects the counters (original code read it unlocked).
                lock (lockable)
                {
                    started++;
                    delay = (int)(rand.NextDouble() * 100);
                }
                Thread.Sleep(delay);
                lock (lockable) finished1++;
            });
            Action action2 = (() =>
            {
                int delay;
                lock (lockable) delay = (int)(rand.NextDouble() * 100);
                Thread.Sleep(delay);
                lock (lockable) finished2++;
            });
            var pipeline = new AsyncPipeline(10);
            for (int i = 0; i < numActions; i++)
            {
                var async1 = Task.Run(action1);
                pipeline.Add(async1);
                var async2 = async1.ContinueWith(_ => action2());
                pipeline.Add(async2);
            }
            // Wait() must not return until every queued task, including the
            // continuations, has completed.
            pipeline.Wait();
            Assert.Equal(numActions, started);
            Assert.Equal(numActions, finished1);
            Assert.Equal(numActions, finished2);
        }

        /// <summary>
        /// Returns tasks where the i-th task completes after the sum of the first
        /// i+1 wait intervals (cumulative delays).
        /// </summary>
        private static Task[] TimedCompletions(params int[] waits)
        {
            var result = new Task[waits.Length];
            var accum = 0;
            for (var i = 0; i < waits.Length; ++i)
            {
                accum += waits[i];
                result[i] = Task.Delay(accum);
            }
            return result;
        }

        [Fact, TestCategory("Functional"), TestCategory("AsynchronyPrimitives")]
        public async Task AsyncPipelineSingleThreadedBlackBoxConsistencyTest()
        {
            await AsyncPipelineBlackBoxConsistencyTest(1);
        }

        /// <summary>
        /// Drives the pipeline from <paramref name="workerCount"/> producers and checks
        /// that total runtime matches the theoretical throughput implied by the pipeline
        /// capacity, within a +/-10% variance.
        /// </summary>
        private async Task AsyncPipelineBlackBoxConsistencyTest(int workerCount)
        {
            if (workerCount < 1)
                // BUGFIX: this constructor is (paramName, message); the original call
                // passed the message text as the parameter name.
                throw new ArgumentOutOfRangeException("workerCount", "You must specify at least one worker.");
            int loopCount = _iterationCount / workerCount;
            const double variance = 0.1;
            int expectedTasksCompleted = loopCount * workerCount;
            var delayLength = TimeSpan.FromSeconds(1);
            const int pipelineCapacity = _defaultPipelineCapacity;
            var pipeline = new AsyncPipeline(pipelineCapacity);
            int tasksCompleted = 0;
            // the following value is wrapped within an array to avoid a modified
            // closure warning from ReSharper.
            int[] pipelineSize = { 0 };
            var capacityReached = new InterlockedFlag();
            Action workFunc = () =>
            {
                var sz = Interlocked.Increment(ref pipelineSize[0]);
                CheckPipelineState(sz, pipelineCapacity, capacityReached);
                Task.Delay(delayLength).Wait();
                Interlocked.Decrement(ref pipelineSize[0]);
                Interlocked.Increment(ref tasksCompleted);
            };
            Action workerFunc = () =>
            {
                for (var j = 0; j < loopCount; j++)
                {
                    Task task = new Task(workFunc);
                    pipeline.Add(task, whiteBox: null);
                    task.Start();
                }
            };
            // Periodically reports progress so long runs are visibly alive.
            Func<Task> monitorFunc = async () =>
            {
                var delay = TimeSpan.FromSeconds(5);
                while (tasksCompleted < expectedTasksCompleted)
                {
                    output.WriteLine("test in progress: tasksCompleted = {0}.", tasksCompleted);
                    await Task.Delay(delay);
                }
            };
            var workers = new Task[workerCount];
            var stopwatch = Stopwatch.StartNew();
            for (var i = 0; i < workerCount; ++i)
                workers[i] = Task.Run(workerFunc);
            Task.Run(monitorFunc).Ignore();
            await Task.WhenAll(workers);
            pipeline.Wait();
            stopwatch.Stop();
            Assert.Equal(expectedTasksCompleted, tasksCompleted); // "The test did not complete the expected number of tasks."
            var targetTimeSec = expectedTasksCompleted * delayLength.TotalSeconds / pipelineCapacity;
            var minTimeSec = (1.0 - variance) * targetTimeSec;
            var maxTimeSec = (1.0 + variance) * targetTimeSec;
            var actualSec = stopwatch.Elapsed.TotalSeconds;
            output.WriteLine(
                "Test finished in {0} sec, {1}% of target time {2} sec. Permitted variance is +/-{3}%",
                actualSec,
                actualSec / targetTimeSec * 100,
                targetTimeSec,
                variance * 100);
            Assert.True(capacityReached.IsSet, "Pipeline capacity not reached; the delay length probably is too short to be useful.");
            Assert.True(
                actualSec >= minTimeSec,
                string.Format("The unit test completed too early ({0} sec < {1} sec).", actualSec, minTimeSec));
            Assert.True(
                actualSec <= maxTimeSec,
                string.Format("The unit test completed too late ({0} sec > {1} sec).", actualSec, maxTimeSec));
        }

        /// <summary>
        /// Asserts the observed in-flight count never exceeds the pipeline capacity
        /// and records (via <paramref name="capacityReached"/>) when the capacity is hit.
        /// </summary>
        private void CheckPipelineState(int size, int capacity, InterlockedFlag capacityReached)
        {
            Assert.True(size >= 0);
            Assert.True(capacity > 0);
            // An understood flaw of the current algorithm is that the capacity can be
            // exceeded by one item. We've decided that this is acceptable.
            Assert.True(size <= capacity, string.Format("size ({0}) must not exceed the capacity ({1})", size, capacity));
            if (capacityReached != null && size == capacity)
                capacityReached.TrySet();
        }
    }
}
package com.zegoggles.smssync.mail; import android.provider.CallLog; import com.fsck.k9.mail.Message; import com.fsck.k9.mail.internet.MimeMessage; import com.zegoggles.smssync.MmsConsts; import com.zegoggles.smssync.SmsConsts; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; import java.util.Date; import java.util.HashMap; import java.util.Map; import static com.zegoggles.smssync.mail.Headers.get; import static org.fest.assertions.api.Assertions.assertThat; @RunWith(RobolectricTestRunner.class) public class HeaderGeneratorTest { private HeaderGenerator generator; @Before public void before() { generator = new HeaderGenerator("ref", "1.0"); } @Test public void testShouldGenerateStandardHeaders() throws Exception { Message message = new MimeMessage(); Map<String, String> map = new HashMap<String, String>(); Date sent = new Date(); PersonRecord person = new PersonRecord(0, null, null, null); generator.setHeaders(message, map, DataType.SMS, "1234", person, sent, 0); assertThat(get(message, Headers.ADDRESS)).isEqualTo("1234"); assertThat(get(message, Headers.DATATYPE)).isEqualTo("SMS"); assertThat(get(message, Headers.BACKUP_TIME)).isNotEmpty(); assertThat(get(message, Headers.VERSION)).isNotEmpty(); assertThat(message.getMessageId()).contains("sms-backup-plus.local"); assertThat(message.getSentDate()).isEqualTo(sent); assertThat(message.getReferences()).isNotEmpty(); } @Test public void testShouldGenerateSMSHeaders() throws Exception { Message message = new MimeMessage(); Map<String, String> map = new HashMap<String, String>(); Date sent = new Date(); PersonRecord person = new PersonRecord(0, null, null, null); map.put(SmsConsts.ID, "someId"); map.put(SmsConsts.TYPE, "type"); map.put(SmsConsts.DATE, "date"); map.put(SmsConsts.THREAD_ID, "tid"); map.put(SmsConsts.READ, "read"); map.put(SmsConsts.STATUS, "status"); map.put(SmsConsts.PROTOCOL, "protocol"); map.put(SmsConsts.SERVICE_CENTER, 
"svc"); generator.setHeaders(message, map, DataType.SMS, "1234", person, sent, 0); assertThat(get(message, Headers.ID)).isEqualTo("someId"); assertThat(get(message, Headers.TYPE)).isEqualTo("type"); assertThat(get(message, Headers.DATE)).isEqualTo("date"); assertThat(get(message, Headers.THREAD_ID)).isEqualTo("tid"); assertThat(get(message, Headers.READ)).isEqualTo("read"); assertThat(get(message, Headers.STATUS)).isEqualTo("status"); assertThat(get(message, Headers.PROTOCOL)).isEqualTo("protocol"); assertThat(get(message, Headers.SERVICE_CENTER)).isEqualTo("svc"); } @Test public void testShouldGenerateCallLogHeaders() throws Exception { Message message = new MimeMessage(); Map<String, String> map = new HashMap<String, String>(); Date sent = new Date(); PersonRecord person = new PersonRecord(0, null, null, null); map.put(CallLog.Calls._ID, "id"); map.put(CallLog.Calls.TYPE, "type"); map.put(CallLog.Calls.DURATION, "duration"); map.put(CallLog.Calls.DATE, "date"); generator.setHeaders(message, map, DataType.CALLLOG, "1234", person, sent, 0); assertThat(get(message, Headers.ID)).isEqualTo("id"); assertThat(get(message, Headers.TYPE)).isEqualTo("type"); assertThat(get(message, Headers.DURATION)).isEqualTo("duration"); assertThat(get(message, Headers.DATE)).isEqualTo("date"); } @Test public void testShouldGenerateMMSHeaders() throws Exception { Message message = new MimeMessage(); Map<String, String> map = new HashMap<String, String>(); Date sent = new Date(); PersonRecord person = new PersonRecord(0, null, null, null); map.put(MmsConsts.ID, "id"); map.put(MmsConsts.TYPE, "type"); map.put(MmsConsts.THREAD_ID, "tid"); map.put(MmsConsts.DATE, "date"); map.put(MmsConsts.READ, "read"); generator.setHeaders(message, map, DataType.MMS, "1234", person, sent, 0); assertThat(get(message, Headers.ID)).isEqualTo("id"); assertThat(get(message, Headers.TYPE)).isEqualTo("type"); assertThat(get(message, Headers.THREAD_ID)).isEqualTo("tid"); assertThat(get(message, 
Headers.READ)).isEqualTo("read"); assertThat(get(message, Headers.DATE)).isEqualTo("date"); } @Test public void testShouldSetHeadersWithNullAddress() throws Exception { Message message = new MimeMessage(); Map<String, String> map = new HashMap<String, String>(); Date sent = new Date(); PersonRecord person = new PersonRecord(0, null, null, null); generator.setHeaders(message, map, DataType.SMS, null, person, sent, 0); } }
package libtrust

import (
	"bytes"
	"crypto"
	"crypto/x509"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"time"
	"unicode"
)

var (
	// ErrInvalidSignContent is used when the content to be signed is invalid.
	ErrInvalidSignContent = errors.New("invalid sign content")

	// ErrInvalidJSONContent is used when invalid json is encountered.
	ErrInvalidJSONContent = errors.New("invalid json content")

	// ErrMissingSignatureKey is used when the specified signature key
	// does not exist in the JSON content.
	ErrMissingSignatureKey = errors.New("missing signature key")
)

// jsHeader is the per-signature JOSE header: either an embedded JWK or an
// x509 certificate chain, plus the signing algorithm identifier.
type jsHeader struct {
	JWK       PublicKey `json:"jwk,omitempty"`
	Algorithm string    `json:"alg"`
	Chain     []string  `json:"x5c,omitempty"`
}

// jsSignature is one entry of the JWS "signatures" array.
type jsSignature struct {
	Header    *jsHeader `json:"header"`
	Signature string    `json:"signature"`
	Protected string    `json:"protected,omitempty"`
}

type signKey struct {
	PrivateKey
	Chain []*x509.Certificate
}

// JSONSignature represents a signature of a json object.
type JSONSignature struct {
	payload      string         // base64url-encoded JSON content being signed
	signatures   []*jsSignature // one entry per Sign/SignWithChain call
	indent       string         // indentation detected from the original serialization
	formatLength int            // byte length of content up to the trailing '}'/whitespace
	formatTail   []byte         // trailing bytes removed at formatLength (restored on parse)
}

func newJSONSignature() *JSONSignature {
	return &JSONSignature{
		signatures: make([]*jsSignature, 0, 1),
	}
}

// Payload returns the encoded payload of the signature. This
// payload should not be signed directly
func (js *JSONSignature) Payload() ([]byte, error) {
	return joseBase64UrlDecode(js.payload)
}

// protectedHeader builds and base64url-encodes the protected header. It
// captures the formatting metadata (formatLength/formatTail) needed later to
// reconstruct the exact original bytes from a pretty signature.
func (js *JSONSignature) protectedHeader() (string, error) {
	protected := map[string]interface{}{
		"formatLength": js.formatLength,
		"formatTail":   joseBase64UrlEncode(js.formatTail),
		"time":         time.Now().UTC().Format(time.RFC3339),
	}
	protectedBytes, err := json.Marshal(protected)
	if err != nil {
		return "", err
	}
	return joseBase64UrlEncode(protectedBytes), nil
}

// signBytes returns the JWS signing input: "<protected>.<payload>".
func (js *JSONSignature) signBytes(protectedHeader string) ([]byte, error) {
	buf := make([]byte, len(js.payload)+len(protectedHeader)+1)
	copy(buf, protectedHeader)
	buf[len(protectedHeader)] = '.'
	copy(buf[len(protectedHeader)+1:], js.payload)
	return buf, nil
}

// Sign adds a signature using the given private key.
func (js *JSONSignature) Sign(key PrivateKey) error {
	protected, err := js.protectedHeader()
	if err != nil {
		return err
	}
	signBytes, err := js.signBytes(protected)
	if err != nil {
		return err
	}
	sigBytes, algorithm, err := key.Sign(bytes.NewReader(signBytes), crypto.SHA256)
	if err != nil {
		return err
	}

	// Embed the public key (JWK) so verifiers can check without a chain.
	header := &jsHeader{
		JWK:       key.PublicKey(),
		Algorithm: algorithm,
	}
	sig := &jsSignature{
		Header:    header,
		Signature: joseBase64UrlEncode(sigBytes),
		Protected: protected,
	}

	js.signatures = append(js.signatures, sig)

	return nil
}

// SignWithChain adds a signature using the given private key
// and setting the x509 chain. The public key of the first element
// in the chain must be the public key corresponding with the sign key.
func (js *JSONSignature) SignWithChain(key PrivateKey, chain []*x509.Certificate) error {
	// NOTE(review): the chain is NOT validated here against the signing key,
	// despite the doc comment above — the caller must ensure chain[0]
	// corresponds to key's public key. The stubs below were never implemented.
	// Ensure key.Chain[0] is public key for key
	//key.Chain.PublicKey
	//key.PublicKey().CryptoPublicKey()

	// Verify chain
	protected, err := js.protectedHeader()
	if err != nil {
		return err
	}
	signBytes, err := js.signBytes(protected)
	if err != nil {
		return err
	}
	sigBytes, algorithm, err := key.Sign(bytes.NewReader(signBytes), crypto.SHA256)
	if err != nil {
		return err
	}

	// The chain is serialized as standard base64 DER certs (JWS "x5c").
	header := &jsHeader{
		Chain:     make([]string, len(chain)),
		Algorithm: algorithm,
	}
	for i, cert := range chain {
		header.Chain[i] = base64.StdEncoding.EncodeToString(cert.Raw)
	}

	sig := &jsSignature{
		Header:    header,
		Signature: joseBase64UrlEncode(sigBytes),
		Protected: protected,
	}

	js.signatures = append(js.signatures, sig)

	return nil
}

// Verify verifies all the signatures and returns the list of
// public keys used to sign. Any x509 chains are not checked.
func (js *JSONSignature) Verify() ([]PublicKey, error) { keys := make([]PublicKey, len(js.signatures)) for i, signature := range js.signatures { signBytes, err := js.signBytes(signature.Protected) if err != nil { return nil, err } var publicKey PublicKey if len(signature.Header.Chain) > 0 { certBytes, err := base64.StdEncoding.DecodeString(signature.Header.Chain[0]) if err != nil { return nil, err } cert, err := x509.ParseCertificate(certBytes) if err != nil { return nil, err } publicKey, err = FromCryptoPublicKey(cert.PublicKey) if err != nil { return nil, err } } else if signature.Header.JWK != nil { publicKey = signature.Header.JWK } else { return nil, errors.New("missing public key") } sigBytes, err := joseBase64UrlDecode(signature.Signature) if err != nil { return nil, err } err = publicKey.Verify(bytes.NewReader(signBytes), signature.Header.Algorithm, sigBytes) if err != nil { return nil, err } keys[i] = publicKey } return keys, nil } // VerifyChains verifies all the signatures and the chains associated // with each signature and returns the list of verified chains. // Signatures without an x509 chain are not checked. 
func (js *JSONSignature) VerifyChains(ca *x509.CertPool) ([][]*x509.Certificate, error) {
	chains := make([][]*x509.Certificate, 0, len(js.signatures))
	for _, signature := range js.signatures {
		signBytes, err := js.signBytes(signature.Protected)
		if err != nil {
			return nil, err
		}
		var publicKey PublicKey
		// Only chained signatures are processed; JWK-only signatures are
		// skipped entirely (their cryptographic check lives in Verify).
		if len(signature.Header.Chain) > 0 {
			// Leaf certificate: provides the public key used for the
			// signature check below.
			certBytes, err := base64.StdEncoding.DecodeString(signature.Header.Chain[0])
			if err != nil {
				return nil, err
			}
			cert, err := x509.ParseCertificate(certBytes)
			if err != nil {
				return nil, err
			}
			publicKey, err = FromCryptoPublicKey(cert.PublicKey)
			if err != nil {
				return nil, err
			}
			// Remaining chain entries become the intermediates pool for
			// path building against the caller-supplied roots.
			intermediates := x509.NewCertPool()
			if len(signature.Header.Chain) > 1 {
				intermediateChain := signature.Header.Chain[1:]
				for i := range intermediateChain {
					certBytes, err := base64.StdEncoding.DecodeString(intermediateChain[i])
					if err != nil {
						return nil, err
					}
					intermediate, err := x509.ParseCertificate(certBytes)
					if err != nil {
						return nil, err
					}
					intermediates.AddCert(intermediate)
				}
			}

			verifyOptions := x509.VerifyOptions{
				Intermediates: intermediates,
				Roots:         ca,
			}

			verifiedChains, err := cert.Verify(verifyOptions)
			if err != nil {
				return nil, err
			}
			chains = append(chains, verifiedChains...)

			// Finally check the signature itself against the leaf key.
			sigBytes, err := joseBase64UrlDecode(signature.Signature)
			if err != nil {
				return nil, err
			}

			err = publicKey.Verify(bytes.NewReader(signBytes), signature.Header.Algorithm, sigBytes)
			if err != nil {
				return nil, err
			}
		}

	}
	return chains, nil
}

// JWS returns JSON serialized JWS according to
// http://tools.ietf.org/html/draft-ietf-jose-json-web-signature-31#section-7.2
func (js *JSONSignature) JWS() ([]byte, error) {
	if len(js.signatures) == 0 {
		return nil, errors.New("missing signature")
	}
	jsonMap := map[string]interface{}{
		"payload":    js.payload,
		"signatures": js.signatures,
	}

	return json.MarshalIndent(jsonMap, "", "   ")
}

// notSpace reports whether r is not a whitespace rune (helper for
// bytes.LastIndexFunc scans).
func notSpace(r rune) bool {
	return !unicode.IsSpace(r)
}

// detectJSONIndent returns the indentation string used by a serialized JSON
// object, detected from the whitespace between the opening '{' and the first
// quoted key. Returns "" when no indentation can be detected.
func detectJSONIndent(jsonContent []byte) (indent string) {
	if len(jsonContent) > 2 && jsonContent[0] == '{' && jsonContent[1] == '\n' {
		quoteIndex := bytes.IndexRune(jsonContent[1:], '"')
		if quoteIndex > 0 {
			indent = string(jsonContent[2 : quoteIndex+1])
		}
	}
	return
}

// jsParsedHeader mirrors jsHeader but defers JWK decoding (RawMessage) so a
// parse error can be surfaced per-signature.
type jsParsedHeader struct {
	JWK       json.RawMessage `json:"jwk"`
	Algorithm string          `json:"alg"`
	Chain     []string        `json:"x5c"`
}

type jsParsedSignature struct {
	Header    *jsParsedHeader `json:"header"`
	Signature string          `json:"signature"`
	Protected string          `json:"protected"`
}

// ParseJWS parses a JWS serialized JSON object into a Json Signature.
func ParseJWS(content []byte) (*JSONSignature, error) { type jsParsed struct { Payload string `json:"payload"` Signatures []*jsParsedSignature `json:"signatures"` } parsed := &jsParsed{} err := json.Unmarshal(content, parsed) if err != nil { return nil, err } if len(parsed.Signatures) == 0 { return nil, errors.New("missing signatures") } payload, err := joseBase64UrlDecode(parsed.Payload) if err != nil { return nil, err } js, err := NewJSONSignature(payload) if err != nil { return nil, err } js.signatures = make([]*jsSignature, len(parsed.Signatures)) for i, signature := range parsed.Signatures { header := &jsHeader{ Algorithm: signature.Header.Algorithm, } if signature.Header.Chain != nil { header.Chain = signature.Header.Chain } if signature.Header.JWK != nil { publicKey, err := UnmarshalPublicKeyJWK([]byte(signature.Header.JWK)) if err != nil { return nil, err } header.JWK = publicKey } js.signatures[i] = &jsSignature{ Header: header, Signature: signature.Signature, Protected: signature.Protected, } } return js, nil } // NewJSONSignature returns a new unsigned JWS from a json byte array. // JSONSignature will need to be signed before serializing or storing. func NewJSONSignature(content []byte) (*JSONSignature, error) { var dataMap map[string]interface{} err := json.Unmarshal(content, &dataMap) if err != nil { return nil, err } js := newJSONSignature() js.indent = detectJSONIndent(content) js.payload = joseBase64UrlEncode(content) // Find trailing } and whitespace, put in protected header closeIndex := bytes.LastIndexFunc(content, notSpace) if content[closeIndex] != '}' { return nil, ErrInvalidJSONContent } lastRuneIndex := bytes.LastIndexFunc(content[:closeIndex], notSpace) if content[lastRuneIndex] == ',' { return nil, ErrInvalidJSONContent } js.formatLength = lastRuneIndex + 1 js.formatTail = content[js.formatLength:] return js, nil } // NewJSONSignatureFromMap returns a new unsigned JSONSignature from a map or // struct. 
JWS will need to be signed before serializing or storing. func NewJSONSignatureFromMap(content interface{}) (*JSONSignature, error) { switch content.(type) { case map[string]interface{}: case struct{}: default: return nil, errors.New("invalid data type") } js := newJSONSignature() js.indent = " " payload, err := json.MarshalIndent(content, "", js.indent) if err != nil { return nil, err } js.payload = joseBase64UrlEncode(payload) // Remove '\n}' from formatted section, put in protected header js.formatLength = len(payload) - 2 js.formatTail = payload[js.formatLength:] return js, nil } func readIntFromMap(key string, m map[string]interface{}) (int, bool) { value, ok := m[key] if !ok { return 0, false } switch v := value.(type) { case int: return v, true case float64: return int(v), true default: return 0, false } } func readStringFromMap(key string, m map[string]interface{}) (v string, ok bool) { value, ok := m[key] if !ok { return "", false } v, ok = value.(string) return } // ParsePrettySignature parses a formatted signature into a // JSON signature. If the signatures are missing the format information // an error is thrown. The formatted signature must be created by // the same method as format signature. 
func ParsePrettySignature(content []byte, signatureKey string) (*JSONSignature, error) { var contentMap map[string]json.RawMessage err := json.Unmarshal(content, &contentMap) if err != nil { return nil, fmt.Errorf("error unmarshalling content: %s", err) } sigMessage, ok := contentMap[signatureKey] if !ok { return nil, ErrMissingSignatureKey } var signatureBlocks []jsParsedSignature err = json.Unmarshal([]byte(sigMessage), &signatureBlocks) if err != nil { return nil, fmt.Errorf("error unmarshalling signatures: %s", err) } js := newJSONSignature() js.signatures = make([]*jsSignature, len(signatureBlocks)) for i, signatureBlock := range signatureBlocks { protectedBytes, err := joseBase64UrlDecode(signatureBlock.Protected) if err != nil { return nil, fmt.Errorf("base64 decode error: %s", err) } var protectedHeader map[string]interface{} err = json.Unmarshal(protectedBytes, &protectedHeader) if err != nil { return nil, fmt.Errorf("error unmarshalling protected header: %s", err) } formatLength, ok := readIntFromMap("formatLength", protectedHeader) if !ok { return nil, errors.New("missing formatted length") } encodedTail, ok := readStringFromMap("formatTail", protectedHeader) if !ok { return nil, errors.New("missing formatted tail") } formatTail, err := joseBase64UrlDecode(encodedTail) if err != nil { return nil, fmt.Errorf("base64 decode error on tail: %s", err) } if js.formatLength == 0 { js.formatLength = formatLength } else if js.formatLength != formatLength { return nil, errors.New("conflicting format length") } if len(js.formatTail) == 0 { js.formatTail = formatTail } else if bytes.Compare(js.formatTail, formatTail) != 0 { return nil, errors.New("conflicting format tail") } header := &jsHeader{ Algorithm: signatureBlock.Header.Algorithm, Chain: signatureBlock.Header.Chain, } if signatureBlock.Header.JWK != nil { publicKey, err := UnmarshalPublicKeyJWK([]byte(signatureBlock.Header.JWK)) if err != nil { return nil, fmt.Errorf("error unmarshalling public key: %s", 
err) } header.JWK = publicKey } js.signatures[i] = &jsSignature{ Header: header, Signature: signatureBlock.Signature, Protected: signatureBlock.Protected, } } if js.formatLength > len(content) { return nil, errors.New("invalid format length") } formatted := make([]byte, js.formatLength+len(js.formatTail)) copy(formatted, content[:js.formatLength]) copy(formatted[js.formatLength:], js.formatTail) js.indent = detectJSONIndent(formatted) js.payload = joseBase64UrlEncode(formatted) return js, nil } // PrettySignature formats a json signature into an easy to read // single json serialized object. func (js *JSONSignature) PrettySignature(signatureKey string) ([]byte, error) { if len(js.signatures) == 0 { return nil, errors.New("no signatures") } payload, err := joseBase64UrlDecode(js.payload) if err != nil { return nil, err } payload = payload[:js.formatLength] var marshalled []byte var marshallErr error if js.indent != "" { marshalled, marshallErr = json.MarshalIndent(js.signatures, js.indent, js.indent) } else { marshalled, marshallErr = json.Marshal(js.signatures) } if marshallErr != nil { return nil, marshallErr } buf := bytes.NewBuffer(make([]byte, 0, len(payload)+len(marshalled)+34)) buf.Write(payload) buf.WriteByte(',') if js.indent != "" { buf.WriteByte('\n') buf.WriteString(js.indent) buf.WriteByte('"') buf.WriteString(signatureKey) buf.WriteString("\": ") buf.Write(marshalled) buf.WriteByte('\n') } else { buf.WriteByte('"') buf.WriteString(signatureKey) buf.WriteString("\":") buf.Write(marshalled) } buf.WriteByte('}') return buf.Bytes(), nil }
package monitors import ( "strconv" "github.com/rackspace/gophercloud" ) const ( path = "loadbalancers" monitorPath = "healthmonitor" ) func rootURL(c *gophercloud.ServiceClient, lbID int) string { return c.ServiceURL(path, strconv.Itoa(lbID), monitorPath) }
package com.siyeh.ig.junit;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.fixes.RenameFix;
import com.siyeh.ig.psiutils.TestUtils;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.List;

/**
 * Offers quick fixes for JUnit 4 annotated methods found inside JUnit 3
 * TestCase classes: removing the @Ignore/@Test annotation (optionally
 * renaming the method) or converting the whole class to JUnit 4.
 */
public class JUnit4AnnotatedMethodInJUnit3TestCaseInspection extends JUnit4AnnotatedMethodInJUnit3TestCaseInspectionBase {

  @NotNull
  @Override
  protected InspectionGadgetsFix[] buildFixes(Object... infos) {
    final List<InspectionGadgetsFix> fixes = new ArrayList<>(3);
    final PsiMethod method = (PsiMethod)infos[1];
    if (AnnotationUtil.isAnnotated(method, IGNORE, false)) {
      fixes.add(new RemoveIgnoreAndRename(method));
    }
    if (TestUtils.isJUnit4TestMethod(method)) {
      String methodName = method.getName();
      String newMethodName;
      if (methodName.startsWith("test")) {
        // Already follows the JUnit 3 naming convention; no rename needed.
        newMethodName = null;
      }
      else {
        // Prefix with "test", preserving snake_case vs. camelCase style.
        boolean lowCaseStyle = methodName.contains("_");
        newMethodName = "test" + (lowCaseStyle ? "_" + methodName : StringUtil.capitalize(methodName));
      }
      fixes.add(new RemoveTestAnnotationFix(newMethodName));
    }
    final PsiClass aClass = (PsiClass)infos[0];
    final String className = aClass.getName();
    fixes.add(new ConvertToJUnit4Fix(className));
    return fixes.toArray(new InspectionGadgetsFix[fixes.size()]);
  }

  // Removes the annotation with the given qualified name from the method that
  // owns the problem element; silently does nothing if anything is missing.
  private static void deleteAnnotation(ProblemDescriptor descriptor, final String qualifiedName) {
    final PsiElement element = descriptor.getPsiElement();
    final PsiElement parent = element.getParent();
    if (!(parent instanceof PsiModifierListOwner)) {
      return;
    }
    final PsiModifierListOwner method = (PsiModifierListOwner)parent;
    final PsiModifierList modifierList = method.getModifierList();
    if (modifierList == null) {
      return;
    }
    final PsiAnnotation annotation = modifierList.findAnnotation(qualifiedName);
    if (annotation == null) {
      return;
    }
    annotation.delete();
  }

  /**
   * Deletes the @Ignore annotation and renames the method to "_"+name so
   * JUnit 3 no longer picks it up as a test.
   */
  private static class RemoveIgnoreAndRename extends RenameFix {

    public RemoveIgnoreAndRename(@NonNls PsiMethod method) {
      super("_" + method.getName());
    }

    @NotNull
    @Override
    public String getName() {
      return InspectionGadgetsBundle.message("ignore.test.method.in.class.extending.junit3.testcase.quickfix", getTargetName());
    }

    @Override
    public void doFix(Project project, ProblemDescriptor descriptor) {
      // Annotation removal needs a write action; the rename is handled by
      // the RenameFix superclass.
      WriteAction.run(() -> deleteAnnotation(descriptor, IGNORE));
      super.doFix(project, descriptor);
    }
  }

  /** Converts the containing JUnit 3 class to JUnit 4 in place. */
  private static class ConvertToJUnit4Fix extends InspectionGadgetsFix {

    private final String className;

    ConvertToJUnit4Fix(String className) {
      this.className = className;
    }

    @Override
    @NotNull
    public String getName() {
      return InspectionGadgetsBundle.message("convert.junit3.test.class.quickfix", className);
    }

    @NotNull
    @Override
    public String getFamilyName() {
      return "Convert JUnit 3 class to JUnit 4";
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
      final PsiElement element = descriptor.getPsiElement();
      final PsiElement parent = element.getParent();
      if (!(parent instanceof PsiMember)) {
        return;
      }
      final PsiMember member = (PsiMember)parent;
      final PsiClass containingClass = member.getContainingClass();
      convertJUnit3ClassToJUnit4(containingClass);
    }
  }

  /**
   * Performs the full JUnit 3 -> JUnit 4 conversion: annotates test and
   * lifecycle methods, rewrites Assert calls, and drops the TestCase
   * superclass from the extends list.
   */
  public static void convertJUnit3ClassToJUnit4(PsiClass containingClass) {
    if (containingClass == null) {
      return;
    }
    final PsiReferenceList extendsList = containingClass.getExtendsList();
    if (extendsList == null) {
      return;
    }
    for (PsiMethod method : containingClass.getMethods()) {
      @NonNls final String name = method.getName();
      // Only no-arg, non-static void methods can be tests or lifecycle hooks.
      if (!method.hasModifierProperty(PsiModifier.STATIC) &&
          PsiType.VOID.equals(method.getReturnType()) &&
          method.getParameterList().getParametersCount() == 0) {
        final PsiModifierList modifierList = method.getModifierList();
        if (name.startsWith("test")) {
          addAnnotationIfNotPresent(modifierList, "org.junit.Test");
        }
        else if (name.equals("setUp")) {
          transformSetUpOrTearDownMethod(method);
          addAnnotationIfNotPresent(modifierList, "org.junit.Before");
        }
        else if (name.equals("tearDown")) {
          transformSetUpOrTearDownMethod(method);
          addAnnotationIfNotPresent(modifierList, "org.junit.After");
        }
      }
      // Rewrite unqualified junit.framework.Assert/TestCase calls in every
      // method body, not just test methods.
      method.accept(new MethodCallModifier());
    }
    final PsiJavaCodeReferenceElement[] referenceElements = extendsList.getReferenceElements();
    for (PsiJavaCodeReferenceElement referenceElement : referenceElements) {
      referenceElement.delete();
    }
  }

  // Adds the annotation (idempotently) and shortens its reference so an
  // import is introduced instead of a fully qualified name.
  private static void addAnnotationIfNotPresent(PsiModifierList modifierList, String qualifiedAnnotationName) {
    if (modifierList.findAnnotation(qualifiedAnnotationName) != null) {
      return;
    }
    final PsiAnnotation annotation = modifierList.addAnnotation(qualifiedAnnotationName);
    final Project project = modifierList.getProject();
    final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
    codeStyleManager.shortenClassReferences(annotation);
  }

  // setUp/tearDown in JUnit 4 must be public, carry no @Override, and must
  // not call the TestCase super implementation.
  private static void transformSetUpOrTearDownMethod(PsiMethod method) {
    final PsiModifierList modifierList = method.getModifierList();
    if (modifierList.hasModifierProperty(PsiModifier.PROTECTED)) {
      modifierList.setModifierProperty(PsiModifier.PROTECTED, false);
    }
    if (!modifierList.hasModifierProperty(PsiModifier.PUBLIC)) {
      modifierList.setModifierProperty(PsiModifier.PUBLIC, true);
    }
    final PsiAnnotation overrideAnnotation = modifierList.findAnnotation("java.lang.Override");
    if (overrideAnnotation != null) {
      overrideAnnotation.delete();
    }
    method.accept(new SuperLifeCycleCallRemover(method.getName()));
  }

  /** Removes super.setUp()/super.tearDown() calls inside a lifecycle method. */
  private static class SuperLifeCycleCallRemover extends JavaRecursiveElementVisitor {

    @NotNull private final String myLifeCycleMethodName;

    private SuperLifeCycleCallRemover(@NotNull String lifeCycleMethodName) {
      myLifeCycleMethodName = lifeCycleMethodName;
    }

    @Override
    public void visitMethodCallExpression(PsiMethodCallExpression expression) {
      super.visitMethodCallExpression(expression);
      final PsiReferenceExpression methodExpression = expression.getMethodExpression();
      final String methodName = methodExpression.getReferenceName();
      if (!myLifeCycleMethodName.equals(methodName)) {
        return;
      }
      final PsiExpression target = methodExpression.getQualifierExpression();
      if (!(target instanceof PsiSuperExpression)) {
        return;
      }
      expression.delete();
    }
  }

  /**
   * Rewrites unqualified static calls inherited from junit.framework.Assert
   * or TestCase (e.g. assertEquals(...)) as org.junit.Assert calls.
   */
  private static class MethodCallModifier extends JavaRecursiveElementVisitor {

    @Override
    public void visitMethodCallExpression(PsiMethodCallExpression expression) {
      super.visitMethodCallExpression(expression);
      final PsiReferenceExpression methodExpression = expression.getMethodExpression();
      if (methodExpression.getQualifierExpression() != null) {
        return;
      }
      final PsiMethod method = expression.resolveMethod();
      if (method == null || !method.hasModifierProperty(PsiModifier.STATIC)) {
        return;
      }
      final PsiClass aClass = method.getContainingClass();
      if (aClass == null) {
        return;
      }
      final String name = aClass.getQualifiedName();
      if (!"junit.framework.Assert".equals(name) && !"junit.framework.TestCase".equals(name)) {
        return;
      }
      @NonNls final String newExpressionText = "org.junit.Assert." + expression.getText();
      final Project project = expression.getProject();
      final PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);
      final PsiExpression newExpression = factory.createExpressionFromText(newExpressionText, expression);
      final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
      final PsiElement replacedExpression = expression.replace(newExpression);
      // Shorten org.junit.Assert back to an imported reference.
      codeStyleManager.shortenClassReferences(replacedExpression);
    }
  }

  /**
   * Removes the @Test annotation and, when a new name was computed, renames
   * the method so JUnit 3 discovers it by the "test" prefix.
   */
  private static class RemoveTestAnnotationFix extends RenameFix {
    private final String myNewName;

    public RemoveTestAnnotationFix(String newName) {
      super(newName);
      myNewName = newName;
    }

    @Override
    @NotNull
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("remove.junit4.test.annotation.quickfix");
    }

    @Override
    @NotNull
    public String getName() {
      return myNewName == null
             ? getFamilyName()
             : InspectionGadgetsBundle.message("remove.junit4.test.annotation.and.rename.quickfix", myNewName);
    }

    @Override
    public void doFix(Project project, ProblemDescriptor descriptor) {
      deleteAnnotation(descriptor, "org.junit.Test");
      if (myNewName != null) {
        super.doFix(project, descriptor);
      }
    }
  }
}
// Expose applyAll on the fc namespace (plugin public utility surface).
fc.applyAll = applyAll;


/* Event Date Math
-----------------------------------------------------------------------------*/

// Returns the exclusive end day of an event. Falls back to start + 1 day
// when the event has no explicit end. Relies on out-of-view date helpers
// (cloneDate, addDays, clearTime).
function exclEndDay(event) {
	if (event.end) {
		return _exclEndDay(event.end, event.allDay);
	}else{
		return addDays(cloneDate(event.start), 1);
	}
}

// Core of exclEndDay: an all-day event, or one ending mid-day, occupies its
// end day, so bump to the next day; a timed event ending exactly at midnight
// does not, so just strip the time component.
function _exclEndDay(end, allDay) {
	end = cloneDate(end);
	return allDay || end.getHours() || end.getMinutes() ? addDays(end, 1) : clearTime(end);
}

// Segment comparator: longer segments first; ties broken by earlier start.
// The *100 weighting makes duration dominate the start-time tiebreaker.
function segCmp(a, b) {
	return (b.msLength - a.msLength) * 100 + (a.event.start - b.event.start);
}

// True when two segments overlap in time (half-open interval check).
function segsCollide(seg1, seg2) {
	return seg1.end > seg2.start && seg1.start < seg2.end;
}


/* Event Sorting
-----------------------------------------------------------------------------*/

// event rendering utilities

// Clips each event to the [start, end) window, producing render "segments".
// visEventEnds[i] is the precomputed visible (exclusive) end of events[i].
// Each segment records whether its edges are the event's true start/end
// (isStart/isEnd) and its millisecond length; result is sorted by segCmp.
function sliceSegs(events, visEventEnds, start, end) {
	var segs = [],
		i, len=events.length, event,
		eventStart, eventEnd,
		segStart, segEnd,
		isStart, isEnd;
	for (i=0; i<len; i++) {
		event = events[i];
		eventStart = event.start;
		eventEnd = visEventEnds[i];
		// Only events intersecting the window produce a segment.
		if (eventEnd > start && eventStart < end) {
			if (eventStart < start) {
				segStart = cloneDate(start);
				isStart = false;
			}else{
				segStart = eventStart;
				isStart = true;
			}
			if (eventEnd > end) {
				segEnd = cloneDate(end);
				isEnd = false;
			}else{
				segEnd = eventEnd;
				isEnd = true;
			}
			segs.push({
				event: event,
				start: segStart,
				end: segEnd,
				isStart: isStart,
				isEnd: isEnd,
				msLength: segEnd - segStart
			});
		}
	}
	return segs.sort(segCmp);
}

// event rendering calculation utilities

// Greedy interval stacking: places each segment on the lowest level (row)
// where it collides with nothing already placed. Returns an array of levels,
// each an array of segments. Input order determines placement priority.
function stackSegs(segs) {
	var levels = [],
		i, len = segs.length, seg,
		j, collide, k;
	for (i=0; i<len; i++) {
		seg = segs[i];
		j = 0; // the level index where seg should belong
		while (true) {
			collide = false;
			if (levels[j]) {
				for (k=0; k<levels[j].length; k++) {
					if (segsCollide(levels[j][k], seg)) {
						collide = true;
						break;
					}
				}
			}
			if (collide) {
				j++;
			}else{
				break;
			}
		}
		if (levels[j]) {
			levels[j].push(seg);
		}else{
			levels[j] = [seg];
		}
	}
	return levels;
}


/* Event Element Binding
-----------------------------------------------------------------------------*/

// Defers per-segment handler binding until the user first mouses over an
// element. The element's _fci expando stores the segment index; once bound
// it is cleared and the original mouseover is re-triggered so the freshly
// attached handlers see the event.
function lazySegBind(container, segs, bindHandlers) {
	container.unbind('mouseover').mouseover(function(ev) {
		// Walk up from the event target to the direct child of container.
		var parent=ev.target, e,
			i, seg;
		while (parent != this) {
			e = parent;
			parent = parent.parentNode;
		}
		if ((i = e._fci) !== undefined) {
			e._fci = undefined;
			seg = segs[i];
			bindHandlers(seg.event, seg.element, seg);
			$(ev.target).trigger(ev); // replay so new handlers fire now
		}
		ev.stopPropagation();
	});
}


/* Element Dimensions
-----------------------------------------------------------------------------*/

// Sets each element's content width so its outer width equals `width`,
// subtracting padding/borders (and optionally margins). Clamped at 0.
function setOuterWidth(element, width, includeMargins) {
	for (var i=0, e; i<element.length; i++) {
		e = $(element[i]);
		e.width(Math.max(0, width - hsides(e, includeMargins)));
	}
}

// Vertical counterpart of setOuterWidth.
function setOuterHeight(element, height, includeMargins) {
	for (var i=0, e; i<element.length; i++) {
		e = $(element[i]);
		e.height(Math.max(0, height - vsides(e, includeMargins)));
	}
}

// Total horizontal non-content pixels: padding + borders (+ margins).
function hsides(element, includeMargins) {
	return hpadding(element) + hborders(element) + (includeMargins ? hmargins(element) : 0);
}

function hpadding(element) {
	return (parseFloat($.css(element[0], 'paddingLeft', true)) || 0) +
	       (parseFloat($.css(element[0], 'paddingRight', true)) || 0);
}

function hmargins(element) {
	return (parseFloat($.css(element[0], 'marginLeft', true)) || 0) +
	       (parseFloat($.css(element[0], 'marginRight', true)) || 0);
}

function hborders(element) {
	return (parseFloat($.css(element[0], 'borderLeftWidth', true)) || 0) +
	       (parseFloat($.css(element[0], 'borderRightWidth', true)) || 0);
}

// Total vertical non-content pixels: padding + borders (+ margins).
function vsides(element, includeMargins) {
	return vpadding(element) + vborders(element) + (includeMargins ? vmargins(element) : 0);
}

function vpadding(element) {
	return (parseFloat($.css(element[0], 'paddingTop', true)) || 0) +
	       (parseFloat($.css(element[0], 'paddingBottom', true)) || 0);
}

function vmargins(element) {
	return (parseFloat($.css(element[0], 'marginTop', true)) || 0) +
	       (parseFloat($.css(element[0], 'marginBottom', true)) || 0);
}

function vborders(element) {
	return (parseFloat($.css(element[0], 'borderTopWidth', true)) || 0) +
	       (parseFloat($.css(element[0], 'borderBottomWidth', true)) || 0);
}

// Applies min-height via raw cssText; '_height' is the old-IE fallback hack.
function setMinHeight(element, height) {
	height = (typeof height == 'number' ? height + 'px' : height);
	element.each(function(i, _element) {
		_element.style.cssText += ';min-height:' + height + ';_height:' + height;
		// why can't we just use .css() ?  i forget
	});
}


/* Misc Utils
-----------------------------------------------------------------------------*/

//TODO: arraySlice
//TODO: isFunction, grep ?

function noop() { }

// Numeric ascending comparator for Array.sort.
function cmp(a, b) {
	return a - b;
}

function arrayMax(a) {
	return Math.max.apply(Math, a);
}

// Left-pads a number to two digits ("7" -> "07").
function zeroPad(n) {
	return (n < 10 ? '0' : '') + n;
}

// get a camel-cased/namespaced property of an object.
// Falls back from the exact camelCase key to progressively earlier
// lowercased name parts, ending with the '' (default) key.
function smartProperty(obj, name) {
	if (obj[name] !== undefined) {
		return obj[name];
	}
	var parts = name.split(/(?=[A-Z])/),
		i=parts.length-1, res;
	for (; i>=0; i--) {
		res = obj[parts[i].toLowerCase()];
		if (res !== undefined) {
			return res;
		}
	}
	return obj[''];
}

// Escapes HTML-special characters and converts newlines to <br />.
function htmlEscape(s) {
	return s.replace(/&/g, '&amp;')
		.replace(/</g, '&lt;')
		.replace(/>/g, '&gt;')
		.replace(/'/g, '&#039;')
		.replace(/"/g, '&quot;')
		.replace(/\n/g, '<br />');
}

// Cache key for an element's styling, ignoring positional properties
// (top/left/width/height) so layout changes don't invalidate the key.
function cssKey(_element) {
	return _element.id + '/' + _element.className + '/' + _element.style.cssText.replace(/(^|;)\s*(top|left|width|height)\s*:[^;]*/ig, '');
}

// Disables text selection cross-browser (IE attr, Mozilla CSS, selectstart).
function disableTextSelection(element) {
	element
		.attr('unselectable', 'on')
		.css('MozUserSelect', 'none')
		.bind('selectstart.ui', function() { return false; });
}

/*
function enableTextSelection(element) {
	element
		.attr('unselectable', 'off')
		.css('MozUserSelect', '')
		.unbind('selectstart.ui');
}
*/

// Re-tags a container's first/last children with fc-first/fc-last classes.
function markFirstLast(e) {
	e.children()
		.removeClass('fc-first fc-last')
		.filter(':first-child')
			.addClass('fc-first')
		.end()
		.filter(':last-child')
			.addClass('fc-last');
}

// Rewrites the leading fc-* class of each cell to match the day of week.
function setDayID(cell, date) {
	cell.each(function(i, _cell) {
		_cell.className = _cell.className.replace(/^fc-\w*/, 'fc-' + dayIDs[date.getDay()]);
		// TODO: make a way that doesn't rely on order of classes
	});
}

// Builds the inline CSS for an event element. Color resolution precedence:
// per-event, event.color shorthand, event source, then calendar options.
function getSkinCss(event, opt) {
	var source = event.source || {};
	var eventColor = event.color;
	var sourceColor = source.color;
	var optionColor = opt('eventColor');
	var backgroundColor =
		event.backgroundColor ||
		eventColor ||
		source.backgroundColor ||
		sourceColor ||
		opt('eventBackgroundColor') ||
		optionColor;
	var borderColor =
		event.borderColor ||
		eventColor ||
		source.borderColor ||
		sourceColor ||
		opt('eventBorderColor') ||
		optionColor;
	var textColor =
		event.textColor ||
		source.textColor ||
		opt('eventTextColor');
	var statements = [];
	if (backgroundColor) {
		statements.push('background-color:' + backgroundColor);
	}
	if (borderColor) {
		statements.push('border-color:' + borderColor);
	}
	if (textColor) {
		statements.push('color:' + textColor);
	}
	return statements.join(';');
}

// Calls one function or an array of functions with the given this/args.
// Returns the last truthy return value among them.
function applyAll(functions, thisObj, args) {
	if ($.isFunction(functions)) {
		functions = [ functions ];
	}
	if (functions) {
		var i;
		var ret;
		for (i=0; i<functions.length; i++) {
			ret = functions[i].apply(thisObj, args) || ret;
		}
		return ret;
	}
}

// Returns the first argument that is not undefined (may return null/0/'').
function firstDefined() {
	for (var i=0; i<arguments.length; i++) {
		if (arguments[i] !== undefined) {
			return arguments[i];
		}
	}
}
<?php

namespace ZendTest\Navigation\Page;

use PHPUnit_Framework_TestCase as TestCase;
use Zend\Mvc\Router\RouteMatch;
use Zend\Mvc\Router\Http\Regex as RegexRoute;
use Zend\Mvc\Router\Http\Literal as LiteralRoute;
use Zend\Mvc\Router\Http\TreeRouteStack;
use Zend\Mvc\ModuleRouteListener;
use Zend\Mvc\MvcEvent;
use Zend\Navigation\Page;
use Zend\Navigation;
use ZendTest\Navigation\TestAsset;

/**
 * Tests the class Zend_Navigation_Page_Mvc
 *
 * @category   Zend
 * @package    Zend_Navigation
 * @subpackage UnitTests
 * @group      Zend_Navigation
 */
class MvcTest extends TestCase
{
    /**
     * Builds a shared regex route, a router containing it under the name
     * 'default', and a RouteMatch pre-set to that route name.
     */
    protected function setUp()
    {
        $this->route  = new RegexRoute(
            '((?<controller>[^/]+)(/(?<action>[^/]+))?)',
            '/%controller%/%action%',
            array(
                'controller' => 'index',
                'action'     => 'index',
            )
        );
        $this->router = new TreeRouteStack();
        $this->router->addRoute('default', $this->route);

        $this->routeMatch = new RouteMatch(array());
        $this->routeMatch->setMatchedRouteName('default');
    }

    protected function tearDown()
    {
    }

    // Changing action/controller after construction must be reflected in the
    // assembled href even without an explicit 'route' option.
    public function testHrefGeneratedByRouterRequiresNoRoute()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'index',
            'controller' => 'index'
        ));
        $page->setRouteMatch($this->routeMatch);
        $page->setRouter($this->router);
        $page->setAction('view');
        $page->setController('news');
        $this->assertEquals('/news/view', $page->getHref());
    }

    // When a 'route' option is given, href assembly uses that named route and
    // the page's params, not the matched route.
    public function testHrefGeneratedIsRouteAware()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'myaction',
            'controller' => 'mycontroller',
            'route'      => 'myroute',
            'params'     => array(
                'page' => 1337
            )
        ));

        $route  = new RegexRoute(
            '(lolcat/(?<action>[^/]+)/(?<page>\d+))',
            '/lolcat/%action%/%page%',
            array(
                'controller' => 'foobar',
                'action'     => 'bazbat',
                'page'       => 1
            )
        );
        $router = new TreeRouteStack();
        $router->addRoute('myroute', $route);

        $routeMatch = new RouteMatch(array(
            'controller' => 'foobar',
            'action'     => 'bazbat',
            'page'       => 1,
        ));

        $page->setRouter($router);
        $page->setRouteMatch($routeMatch);

        $this->assertEquals('/lolcat/myaction/1337', $page->getHref());
    }

    public function testIsActiveReturnsTrueWhenMatchingRoute()
    {
        $page = new Page\Mvc(array(
            'label' => 'spiffyjrwashere',
            'route' => 'lolfish'
        ));

        $route = new LiteralRoute('/lolfish');

        $router = new TreeRouteStack;
        $router->addRoute('lolfish', $route);

        $routeMatch = new RouteMatch(array());
        $routeMatch->setMatchedRouteName('lolfish');

        $page->setRouter($router);
        $page->setRouteMatch($routeMatch);

        $this->assertEquals(true, $page->isActive());
    }

    // The ModuleRouteListener rewrites the controller param on route; the page
    // must still be considered active after that rewrite.
    public function testIsActiveReturnsTrueWhenMatchingRouteWhileUsingModuleRouteListener()
    {
        $page = new Page\Mvc(array(
            'label'      => 'mpinkstonwashere',
            'route'      => 'roflcopter',
            'controller' => 'index'
        ));

        $route = new LiteralRoute('/roflcopter');

        $router = new TreeRouteStack;
        $router->addRoute('roflcopter', $route);

        $routeMatch = new RouteMatch(array(
            ModuleRouteListener::MODULE_NAMESPACE => 'Application\Controller',
            'controller'                          => 'index'
        ));
        $routeMatch->setMatchedRouteName('roflcopter');

        $event = new MvcEvent();
        $event->setRouter($router)
              ->setRouteMatch($routeMatch);

        $moduleRouteListener = new ModuleRouteListener();
        $moduleRouteListener->onRoute($event);

        $page->setRouter($event->getRouter());
        $page->setRouteMatch($event->getRouteMatch());

        $this->assertEquals(true, $page->isActive());
    }

    public function testIsActiveReturnsFalseWhenMatchingRouteButNonMatchingParams()
    {
        $page = new Page\Mvc(array(
            'label'  => 'foo',
            'route'  => 'bar',
            'action' => 'baz',
        ));

        $routeMatch = new RouteMatch(array());
        $routeMatch->setMatchedRouteName('bar');
        $routeMatch->setParam('action', 'qux');

        $page->setRouteMatch($routeMatch);

        $this->assertFalse($page->isActive());
    }

    public function testIsActiveReturnsFalseWhenNoRouteAndNoMatchedRouteNameIsSet()
    {
        $page = new Page\Mvc();

        $routeMatch = new RouteMatch(array());

        $page->setRouteMatch($routeMatch);

        $this->assertFalse($page->isActive());
    }

    /**
     * @group ZF-8922
     */
    public function testGetHrefWithFragmentIdentifier()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'fragment'   => 'qux',
            'controller' => 'mycontroller',
            'action'     => 'myaction',
            'route'      => 'myroute',
            'params'     => array(
                'page' => 1337
            )
        ));

        $route = new RegexRoute(
            '(lolcat/(?<action>[^/]+)/(?<page>\d+))',
            '/lolcat/%action%/%page%',
            array(
                'controller' => 'foobar',
                'action'     => 'bazbat',
                'page'       => 1,
            )
        );
        $this->router->addRoute('myroute', $route);
        $this->routeMatch->setMatchedRouteName('myroute');

        $page->setRouteMatch($this->routeMatch);
        $page->setRouter($this->router);

        $this->assertEquals('/lolcat/myaction/1337#qux', $page->getHref());
    }

    public function testIsActiveReturnsTrueOnIdenticalControllerAction()
    {
        $page = new Page\Mvc(array(
            'action'     => 'index',
            'controller' => 'index'
        ));

        $routeMatch = new RouteMatch(array(
            'controller' => 'index',
            'action'     => 'index',
        ));

        $page->setRouteMatch($routeMatch);

        $this->assertTrue($page->isActive());
    }

    public function testIsActiveReturnsFalseOnDifferentControllerAction()
    {
        $page = new Page\Mvc(array(
            'action'     => 'bar',
            'controller' => 'index'
        ));

        $routeMatch = new RouteMatch(array(
            'controller' => 'index',
            'action'     => 'index',
        ));

        $page->setRouteMatch($routeMatch);

        $this->assertFalse($page->isActive());
    }

    public function testIsActiveReturnsTrueOnIdenticalIncludingPageParams()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'view',
            'controller' => 'post',
            'params'     => array(
                'id' => '1337'
            )
        ));

        $routeMatch = new RouteMatch(array(
            'controller' => 'post',
            'action'     => 'view',
            'id'         => '1337'
        ));

        $page->setRouteMatch($routeMatch);

        $this->assertTrue($page->isActive());
    }

    // Extra request params beyond the page's own do not break activity.
    public function testIsActiveReturnsTrueWhenRequestHasMoreParams()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'view',
            'controller' => 'post',
        ));

        $routeMatch = new RouteMatch(array(
            'controller' => 'post',
            'action'     => 'view',
            'id'         => '1337',
        ));

        $page->setRouteMatch($routeMatch);

        $this->assertTrue($page->isActive());
    }

    // A page param that the request resolves to null counts as missing.
    public function testIsActiveReturnsFalseWhenRequestHasLessParams()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'view',
            'controller' => 'post',
            'params'     => array(
                'id' => '1337'
            )
        ));

        $routeMatch = new RouteMatch(array(
            'controller' => 'post',
            'action'     => 'view',
            'id'         => null
        ));

        $page->setRouteMatch($routeMatch);

        $this->assertFalse($page->isActive());
    }

    // Valid values (incl. '', null) round-trip through the accessors; invalid
    // types must raise Zend\Navigation InvalidArgumentException.
    public function testActionAndControllerAccessors()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'index',
            'controller' => 'index'
        ));

        $props    = array('Action', 'Controller');
        $valids   = array('index', 'help', 'home', 'default', '1', ' ', '', null);
        $invalids = array(42, (object) null);

        foreach ($props as $prop) {
            $setter = "set$prop";
            $getter = "get$prop";

            foreach ($valids as $valid) {
                $page->$setter($valid);
                $this->assertEquals($valid, $page->$getter());
            }

            foreach ($invalids as $invalid) {
                try {
                    $page->$setter($invalid);
                    $msg  = "'$invalid' is invalid for $setter(), but no ";
                    $msg .= 'Zend\Navigation\Exception\InvalidArgumentException was thrown';
                    $this->fail($msg);
                } catch (Navigation\Exception\InvalidArgumentException $e) {
                }
            }
        }
    }

    // Same contract for setRoute/getRoute (note: '' is not in the valid list).
    public function testRouteAccessor()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'index',
            'controller' => 'index'
        ));

        $props    = array('Route');
        $valids   = array('index', 'help', 'home', 'default', '1', ' ', null);
        $invalids = array(42, (object) null);

        foreach ($props as $prop) {
            $setter = "set$prop";
            $getter = "get$prop";

            foreach ($valids as $valid) {
                $page->$setter($valid);
                $this->assertEquals($valid, $page->$getter());
            }

            foreach ($invalids as $invalid) {
                try {
                    $page->$setter($invalid);
                    $msg  = "'$invalid' is invalid for $setter(), but no ";
                    $msg .= 'Zend\Navigation\Exception\InvalidArgumentException was thrown';
                    $this->fail($msg);
                } catch (Navigation\Exception\InvalidArgumentException $e) {
                }
            }
        }
    }

    // setParams() with no argument, and setParams(array()), both reset to [].
    public function testSetAndGetParams()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'index',
            'controller' => 'index'
        ));

        $params = array('foo' => 'bar', 'baz' => 'bat');

        $page->setParams($params);
        $this->assertEquals($params, $page->getParams());

        $page->setParams();
        $this->assertEquals(array(), $page->getParams());

        $page->setParams($params);
        $this->assertEquals($params, $page->getParams());

        $page->setParams(array());
        $this->assertEquals(array(), $page->getParams());
    }

    // toArray() must echo back the constructor options plus the documented
    // defaults for everything unset (route, params, rel, rev, type, ...).
    public function testToArrayMethod()
    {
        $options = array(
            'label'       => 'foo',
            'action'      => 'index',
            'controller'  => 'index',
            'fragment'    => 'bar',
            'id'          => 'my-id',
            'class'       => 'my-class',
            'title'       => 'my-title',
            'target'      => 'my-target',
            'order'       => 100,
            'active'      => true,
            'visible'     => false,
            'foo'         => 'bar',
            'meaning'     => 42,
            'router'      => $this->router,
            'route_match' => $this->routeMatch,
        );

        $page = new Page\Mvc($options);

        $toArray = $page->toArray();

        $options['route']     = null;
        $options['params']    = array();
        $options['rel']       = array();
        $options['rev']       = array();
        $options['privilege'] = null;
        $options['resource']  = null;
        $options['pages']     = array();
        $options['type']      = 'Zend\Navigation\Page\Mvc';

        ksort($options);
        ksort($toArray);

        $this->assertEquals($options, $toArray);
    }

    // A custom router injected on the page wins over any default.
    public function testSpecifyingAnotherUrlHelperToGenerateHrefs()
    {
        $newRouter = new TestAsset\Router();

        $page = new Page\Mvc(array(
            'route' => 'default'
        ));
        $page->setRouter($newRouter);

        $expected = TestAsset\Router::RETURN_URL;
        $actual   = $page->getHref();

        $this->assertEquals($expected, $actual);
    }

    public function testDefaultRouterCanBeSetWithConstructor()
    {
        $page = new Page\Mvc(array(
            'label'         => 'foo',
            'action'        => 'index',
            'controller'    => 'index',
            'defaultRouter' => $this->router
        ));

        $this->assertEquals($this->router, $page->getDefaultRouter());
        $page->setDefaultRouter(null);
    }

    public function testDefaultRouterCanBeSetWithGetter()
    {
        $page = new Page\Mvc(array(
            'label'      => 'foo',
            'action'     => 'index',
            'controller' => 'index',
        ));
        $page->setDefaultRouter($this->router);

        $this->assertEquals($this->router, $page->getDefaultRouter());
        $page->setDefaultRouter(null);
    }

    public function testNoExceptionForGetHrefIfDefaultRouterIsSet()
    {
        $page = new Page\Mvc(array(
            'label'         => 'foo',
            'action'        => 'index',
            'controller'    => 'index',
            'route'         => 'default',
            'defaultRouter' => $this->router
        ));

        // If the default router is not used an exception will be thrown.
        // This method intentionally has no assertion.
        $page->getHref();
        $page->setDefaultRouter(null);
    }
}
#include "talk/xmpp/xmpptask.h"
#include "talk/xmpp/xmppclient.h"
#include "talk/xmpp/xmppengine.h"
#include "talk/xmpp/constants.h"
#include "talk/xmpp/ratelimitmanager.h"

namespace buzz {

// Process-wide rate limiter shared by all XmppTask instances
// (see VerifyTaskRateLimit below).
RateLimitManager task_rate_manager;

// Registers this task with the owning XmppClient found via the parent task
// chain, obtains a unique stanza id, and subscribes to disconnect
// notifications so the task can error out when the connection drops.
XmppTask::XmppTask(TaskParent* parent, XmppEngine::HandlerLevel level)
    : Task(parent), client_(NULL) {
#ifdef _DEBUG
  debug_force_timeout_ = false;
#endif

  XmppClient* client =
    static_cast<XmppClient*>(parent->GetParent(XMPP_CLIENT_TASK_CODE));
  client_ = client;
  id_ = client->NextId();
  client->AddXmppTask(this, level);
  client->SignalDisconnected.connect(this, &XmppTask::OnDisconnect);
}

XmppTask::~XmppTask() {
  StopImpl();
}

// Drains (and thereby frees) any queued stanzas, then unregisters from the
// client. Safe to call more than once: client_ is nulled after cleanup.
void XmppTask::StopImpl() {
  // Each NextStanza() call transfers the popped stanza into next_stanza_,
  // which deletes the previous one, so this loop releases the whole queue.
  while (NextStanza() != NULL) {}
  if (client_) {
    client_->RemoveXmppTask(this);
    client_->SignalDisconnected.disconnect(this);
    client_ = NULL;
  }
}

// Forwards a stanza to the client; fails with BADSTATE once disconnected.
XmppReturnStatus XmppTask::SendStanza(const XmlElement* stanza) {
  if (client_ == NULL)
    return XMPP_RETURN_BADSTATE;
  return client_->SendStanza(stanza);
}

// Sends a stanza-level error reply derived from the original element.
XmppReturnStatus XmppTask::SendStanzaError(const XmlElement* element_original,
                                           XmppStanzaError code,
                                           const std::string& text) {
  if (client_ == NULL)
    return XMPP_RETURN_BADSTATE;
  return client_->SendStanzaError(element_original, code, text);
}

void XmppTask::Stop() {
  StopImpl();
  Task::Stop();
}

// Connection loss aborts the task.
void XmppTask::OnDisconnect() {
  Error();
}

// Deep-copies the incoming stanza onto the queue and wakes the task.
void XmppTask::QueueStanza(const XmlElement* stanza) {
#ifdef _DEBUG
  if (debug_force_timeout_)
    return;  // debug hook: drop stanzas to simulate a timeout
#endif

  stanza_queue_.push_back(new XmlElement(*stanza));
  Wake();
}

// Pops the next queued stanza, transferring ownership to next_stanza_.
// The returned pointer stays valid only until the next call (or NULL when
// the queue is empty).
const XmlElement* XmppTask::NextStanza() {
  XmlElement* result = NULL;
  if (!stanza_queue_.empty()) {
    result = stanza_queue_.front();
    stanza_queue_.pop_front();
  }
  next_stanza_.reset(result);
  return result;
}

// Builds an <iq> element; empty type/to/id attributes are simply omitted.
// Caller owns the returned element.
XmlElement* XmppTask::MakeIq(const std::string& type,
                             const buzz::Jid& to,
                             const std::string& id) {
  XmlElement* result = new XmlElement(QN_IQ);
  if (!type.empty())
    result->AddAttr(QN_TYPE, type);
  if (to != JID_EMPTY)
    result->AddAttr(QN_TO, to.Str());
  if (!id.empty())
    result->AddAttr(QN_ID, id);
  return result;
}

// Builds an <iq type="result"> reply mirroring the query's from/id.
// Caller owns the returned element.
XmlElement* XmppTask::MakeIqResult(const XmlElement * query) {
  XmlElement* result = new XmlElement(QN_IQ);
  result->AddAttr(QN_TYPE, STR_RESULT);
  if (query->HasAttr(QN_FROM)) {
    result->AddAttr(QN_TO, query->Attr(QN_FROM));
  }
  result->AddAttr(QN_ID, query->Attr(QN_ID));
  return result;
}

// True when `stanza` is the iq response for a request we sent to `to` with
// the given id, accounting for the server's various self-identifications.
bool XmppTask::MatchResponseIq(const XmlElement* stanza,
                               const Jid& to,
                               const std::string& id) {
  if (stanza->Name() != QN_IQ)
    return false;

  if (stanza->Attr(QN_ID) != id)
    return false;

  Jid from(stanza->Attr(QN_FROM));
  if (from == to)
    return true;

  // We address the server as "", check if we are doing so here.
  if (to != JID_EMPTY)
    return false;

  // It is legal for the server to identify itself with "domain" or
  // "myself@domain"
  Jid me = client_->jid();
  return (from == Jid(me.domain())) || (from == me.BareJid());
}

// True when `stanza` is an <iq> of the given type carrying a child element
// with qualified name `qn`.
bool XmppTask::MatchRequestIq(const XmlElement* stanza,
                              const std::string& type,
                              const QName& qn) {
  if (stanza->Name() != QN_IQ)
    return false;

  if (stanza->Attr(QN_TYPE) != type)
    return false;

  if (stanza->FirstNamed(qn) == NULL)
    return false;

  return true;
}

// Checks the shared rate limiter for `task_name`.
// NOTE(review): task_name is passed by value; a const reference would avoid
// a copy, but changing it here would mismatch the header declaration —
// confirm against xmpptask.h before touching.
bool XmppTask::VerifyTaskRateLimit(const std::string task_name,
                                   int max_count,
                                   int per_x_seconds) {
  return task_rate_manager.VerifyRateLimit(task_name,
                                           max_count,
                                           per_x_seconds);
}

}
package org.apache.beam.sdk.io.rabbitmq;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.Method;
import com.rabbitmq.client.ShutdownSignalException;
import java.io.Serializable;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.common.NetworkTestHelper;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TypeDescriptors;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Throwables;
import org.apache.qpid.server.SystemLauncher;
import org.apache.qpid.server.model.SystemConfig;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.Timeout;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Test of {@link RabbitMqIO}. Runs against an embedded Qpid broker speaking AMQP. */
@RunWith(JUnit4.class)
public class RabbitMqIOTest implements Serializable {

  // Broker port chosen at runtime to avoid collisions between test runs.
  private static int port;
  // Original value of the qpid.amqp_port system property, restored after class.
  private static String defaultPort;

  @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder();

  @ClassRule public static Timeout classTimeout = new Timeout(5, TimeUnit.MINUTES);

  @Rule public transient TestPipeline p = TestPipeline.create();

  // Embedded Qpid broker shared by all tests in this class.
  private static transient SystemLauncher launcher;

  /** Starts the embedded Qpid broker on a free local port. */
  @BeforeClass
  public static void beforeClass() throws Exception {
    port = NetworkTestHelper.getAvailableLocalPort();
    defaultPort = System.getProperty("qpid.amqp_port");
    System.setProperty("qpid.amqp_port", "" + port);
    System.setProperty("derby.stream.error.field", "MyApp.DEV_NULL");
    // see https://stackoverflow.com/a/49234754/796064 for qpid setup
    launcher = new SystemLauncher();

    Map<String, Object> attributes = new HashMap<>();
    URL initialConfig = RabbitMqIOTest.class.getResource("rabbitmq-io-test-config.json");
    attributes.put("type", "Memory");
    attributes.put("initialConfigurationLocation", initialConfig.toExternalForm());
    attributes.put(SystemConfig.DEFAULT_QPID_WORK_DIR, temporaryFolder.newFolder().toString());
    launcher.startup(attributes);
  }

  /** Shuts the broker down and restores the qpid.amqp_port property. */
  @AfterClass
  public static void afterClass() {
    launcher.shutdown();
    if (defaultPort != null) {
      System.setProperty("qpid.amqp_port", defaultPort);
    } else {
      System.clearProperty("qpid.amqp_port");
    }
  }

  // Reads a bounded number of records from a queue; messages are published on
  // the raw AMQP channel before the pipeline is run.
  @Test
  public void testReadQueue() throws Exception {
    final int maxNumRecords = 10;
    PCollection<RabbitMqMessage> raw =
        p.apply(
            RabbitMqIO.read()
                .withUri("amqp://guest:guest@localhost:" + port)
                .withQueue("READ")
                .withMaxNumRecords(maxNumRecords));
    PCollection<String> output =
        raw.apply(
            MapElements.into(TypeDescriptors.strings())
                .via(
                    (RabbitMqMessage message) ->
                        RabbitMqTestUtils.recordToString(message.getBody())));

    List<String> records =
        RabbitMqTestUtils.generateRecords(maxNumRecords).stream()
            .map(RabbitMqTestUtils::recordToString)
            .collect(Collectors.toList());

    PAssert.that(output).containsInAnyOrder(records);

    ConnectionFactory connectionFactory = new ConnectionFactory();
    connectionFactory.setUri("amqp://guest:guest@localhost:" + port);
    Connection connection = null;
    Channel channel = null;
    try {
      connection = connectionFactory.newConnection();
      channel = connection.createChannel();
      channel.queueDeclare("READ", false, false, false, null);
      for (String record : records) {
        channel.basicPublish("", "READ", null, record.getBytes(StandardCharsets.UTF_8));
      }

      p.run();
    } finally {
      if (channel != null) {
        channel.close();
      }
      if (connection != null) {
        connection.close();
      }
    }
  }

  /**
   * Helper for running tests against an exchange.
   *
   * <p>This function will automatically specify (and overwrite) the uri and numRecords values of
   * the Read definition.
   */
  private void doExchangeTest(ExchangeTestPlan testPlan, boolean simulateIncompatibleExchange)
      throws Exception {
    // Sentinel object (compared by identity) that tells the publisher to stop.
    final byte[] terminalRecord = new byte[0];

    final String uri = "amqp://guest:guest@localhost:" + port;
    RabbitMqIO.Read read = testPlan.getRead();
    PCollection<RabbitMqMessage> raw =
        p.apply(read.withUri(uri).withMaxNumRecords(testPlan.getNumRecords()));

    PCollection<String> result =
        raw.apply(
            MapElements.into(TypeDescriptors.strings())
                .via(
                    (RabbitMqMessage message) ->
                        RabbitMqTestUtils.recordToString(message.getBody())));

    List<String> expected = testPlan.expectedResults();

    PAssert.that(result).containsInAnyOrder(expected);

    // on UUID fallback: tests appear to execute concurrently in jenkins, so
    // exchanges and queues between tests must be distinct
    String exchange =
        Optional.ofNullable(read.exchange()).orElseGet(() -> UUID.randomUUID().toString());
    String exchangeType = Optional.ofNullable(read.exchangeType()).orElse("fanout");
    if (simulateIncompatibleExchange) {
      // Rabbit will fail when attempting to declare an existing exchange that
      // has different properties (e.g. declaring a non-durable exchange if
      // an existing one is durable). QPid does not exhibit this behavior. To
      // simulate the error condition where RabbitMqIO attempts to declare an
      // incompatible exchange, we instead declare an exchange with the same
      // name but of a different type. Both Rabbit and QPid will fail this.
      if ("fanout".equalsIgnoreCase(exchangeType)) {
        exchangeType = "direct";
      } else {
        exchangeType = "fanout";
      }
    }
    final String finalExchangeType = exchangeType;

    final CountDownLatch waitForExchangeToBeDeclared = new CountDownLatch(1);
    final BlockingQueue<byte[]> recordsToPublish = new LinkedBlockingQueue<>();
    recordsToPublish.addAll(RabbitMqTestUtils.generateRecords(testPlan.getNumRecordsToPublish()));

    // Publisher runs on its own thread so the pipeline can consume concurrently.
    Thread publisher =
        new Thread(
            () -> {
              Connection connection = null;
              Channel channel = null;
              try {
                ConnectionFactory connectionFactory = new ConnectionFactory();
                connectionFactory.setAutomaticRecoveryEnabled(false);
                connectionFactory.setUri(uri);
                connection = connectionFactory.newConnection();
                channel = connection.createChannel();
                channel.exchangeDeclare(exchange, finalExchangeType);
                // We are relying on the pipeline to declare the queue and messages that are
                // published without a queue being declared are "unroutable". Since there is a race
                // between when the pipeline declares and when we can start publishing, we add a
                // handler to republish messages that are returned to us.
                channel.addReturnListener(
                    (replyCode, replyText, exchange1, routingKey, properties, body) -> {
                      try {
                        recordsToPublish.put(body);
                      } catch (Exception e) {
                        throw new RuntimeException(e);
                      }
                    });
                waitForExchangeToBeDeclared.countDown();
                while (true) {
                  byte[] record = recordsToPublish.take();
                  // Identity check: the terminal sentinel ends the publish loop.
                  if (record == terminalRecord) {
                    return;
                  }
                  channel.basicPublish(
                      exchange,
                      testPlan.publishRoutingKeyGen().get(),
                      true, // ensure that messages are returned to sender
                      testPlan.getPublishProperties(),
                      record);
                }
              } catch (Exception e) {
                throw new RuntimeException(e);
              } finally {
                if (channel != null) {
                  // channel may have already been closed automatically due to protocol failure
                  try {
                    channel.close();
                  } catch (Exception e) {
                    /* ignored */
                  }
                }
                if (connection != null) {
                  // connection may have already been closed automatically due to protocol failure
                  try {
                    connection.close();
                  } catch (Exception e) {
                    /* ignored */
                  }
                }
              }
            });
    publisher.start();
    waitForExchangeToBeDeclared.await();
    p.run();
    // Unblock and terminate the publisher after the pipeline finishes.
    recordsToPublish.put(terminalRecord);
    publisher.join();
  }

  /** Variant of {@link #doExchangeTest} without incompatible-exchange simulation. */
  private void doExchangeTest(ExchangeTestPlan testPlan) throws Exception {
    doExchangeTest(testPlan, false);
  }

  @Test
  public void testReadDeclaredFanoutExchange() throws Exception {
    doExchangeTest(
        new ExchangeTestPlan(
            RabbitMqIO.read().withExchange("DeclaredFanoutExchange", "fanout", "ignored"), 10));
  }

  @Test
  public void testReadDeclaredTopicExchangeWithQueueDeclare() throws Exception {
    doExchangeTest(
        new ExchangeTestPlan(
            RabbitMqIO.read()
                .withExchange("DeclaredTopicExchangeWithQueueDeclare", "topic", "#")
                .withQueue("declared-queue")
                .withQueueDeclare(true),
            10));
  }

  // Topic exchange: only routing keys matching "user.create.#" (the even-indexed
  // records) should be read; odd-indexed "user.delete.*" messages are filtered out.
  @Test
  public void testReadDeclaredTopicExchange() throws Exception {
    final int numRecords = 10;
    RabbitMqIO.Read read =
        RabbitMqIO.read().withExchange("DeclaredTopicExchange", "topic", "user.create.#");

    final Supplier<String> publishRoutingKeyGen =
        new Supplier<String>() {
          private AtomicInteger counter = new AtomicInteger(0);

          @Override
          public String get() {
            int count = counter.getAndIncrement();
            if (count % 2 == 0) {
              return "user.create." + count;
            }
            return "user.delete." + count;
          }
        };

    ExchangeTestPlan plan =
        new ExchangeTestPlan(read, numRecords / 2, numRecords) {
          @Override
          public Supplier<String> publishRoutingKeyGen() {
            return publishRoutingKeyGen;
          }

          @Override
          public List<String> expectedResults() {
            return IntStream.range(0, numRecords)
                .filter(i -> i % 2 == 0)
                .mapToObj(RabbitMqTestUtils::generateRecord)
                .map(RabbitMqTestUtils::recordToString)
                .collect(Collectors.toList());
          }
        };

    doExchangeTest(plan);
  }

  // Declaring an exchange whose type conflicts with an existing declaration must
  // fail with AMQP reply code 530 (not-allowed) delivered via Connection.Close.
  @Test
  public void testDeclareIncompatibleExchangeFails() throws Exception {
    RabbitMqIO.Read read =
        RabbitMqIO.read().withExchange("IncompatibleExchange", "direct", "unused");
    try {
      doExchangeTest(new ExchangeTestPlan(read, 1), true);
      fail("Expected to have failed to declare an incompatible exchange");
    } catch (Exception e) {
      Throwable cause = Throwables.getRootCause(e);
      if (cause instanceof ShutdownSignalException) {
        ShutdownSignalException sse = (ShutdownSignalException) cause;
        Method reason = sse.getReason();
        if (reason instanceof com.rabbitmq.client.AMQP.Connection.Close) {
          com.rabbitmq.client.AMQP.Connection.Close close =
              (com.rabbitmq.client.AMQP.Connection.Close) reason;
          assertEquals("Expected failure is 530: not-allowed", 530, close.getReplyCode());
        } else {
          fail(
              "Unexpected ShutdownSignalException reason. Expected Connection.Close. Got: "
                  + reason);
        }
      } else {
        fail("Expected to fail with ShutdownSignalException. Instead failed with " + cause);
      }
    }
  }

  @Test
  public void testUseCorrelationIdSucceedsWhenIdsPresent() throws Exception {
    int messageCount = 1;
    AMQP.BasicProperties publishProps =
        new AMQP.BasicProperties().builder().correlationId("123").build();

    doExchangeTest(
        new ExchangeTestPlan(
            RabbitMqIO.read()
                .withExchange("CorrelationIdSuccess", "fanout")
                .withUseCorrelationId(true),
            messageCount,
            messageCount,
            publishProps));
  }

  // Reading with correlation-id dedup enabled but no id on the message is a
  // pipeline-level failure.
  @Test(expected = Pipeline.PipelineExecutionException.class)
  public void testUseCorrelationIdFailsWhenIdsMissing() throws Exception {
    int messageCount = 1;
    AMQP.BasicProperties publishProps = null;

    doExchangeTest(
        new ExchangeTestPlan(
            RabbitMqIO.read()
                .withExchange("CorrelationIdFailure", "fanout")
                .withUseCorrelationId(true),
            messageCount,
            messageCount,
            publishProps));
  }

  @Test(expected = Pipeline.PipelineExecutionException.class)
  public void testQueueDeclareWithoutQueueNameFails() throws Exception {
    RabbitMqIO.Read read = RabbitMqIO.read().withQueueDeclare(true);
    doExchangeTest(new ExchangeTestPlan(read, 1));
  }

  // Write path: pipeline publishes to a queue; a raw consumer drains it and the
  // test polls until all records arrive.
  @Test
  public void testWriteQueue() throws Exception {
    final int maxNumRecords = 1000;
    List<RabbitMqMessage> data =
        RabbitMqTestUtils.generateRecords(maxNumRecords).stream()
            .map(RabbitMqMessage::new)
            .collect(Collectors.toList());
    p.apply(Create.of(data))
        .apply(
            RabbitMqIO.write().withUri("amqp://guest:guest@localhost:" + port).withQueue("TEST"));

    ConnectionFactory connectionFactory = new ConnectionFactory();
    connectionFactory.setUri("amqp://guest:guest@localhost:" + port);
    Connection connection = null;
    Channel channel = null;
    try {
      connection = connectionFactory.newConnection();
      channel = connection.createChannel();
      channel.queueDeclare("TEST", true, false, false, null);
      RabbitMqTestUtils.TestConsumer consumer = new RabbitMqTestUtils.TestConsumer(channel);
      channel.basicConsume("TEST", true, consumer);

      p.run();

      // Poll until the consumer has drained every published record.
      while (consumer.getReceived().size() < maxNumRecords) {
        Thread.sleep(500);
      }

      assertEquals(maxNumRecords, consumer.getReceived().size());
      for (int i = 0; i < maxNumRecords; i++) {
        assertTrue(consumer.getReceived().contains("Test " + i));
      }
    } finally {
      if (channel != null) {
        channel.close();
      }
      if (connection != null) {
        connection.close();
      }
    }
  }

  // Write path via a fanout exchange bound to a server-named queue.
  @Test
  public void testWriteExchange() throws Exception {
    final int maxNumRecords = 1000;
    List<RabbitMqMessage> data =
        RabbitMqTestUtils.generateRecords(maxNumRecords).stream()
            .map(RabbitMqMessage::new)
            .collect(Collectors.toList());
    p.apply(Create.of(data))
        .apply(
            RabbitMqIO.write()
                .withUri("amqp://guest:guest@localhost:" + port)
                .withExchange("WRITE", "fanout"));

    ConnectionFactory connectionFactory = new ConnectionFactory();
    connectionFactory.setUri("amqp://guest:guest@localhost:" + port);
    Connection connection = null;
    Channel channel = null;
    try {
      connection = connectionFactory.newConnection();
      channel = connection.createChannel();
      channel.exchangeDeclare("WRITE", "fanout");
      String queueName = channel.queueDeclare().getQueue();
      channel.queueBind(queueName, "WRITE", "");
      RabbitMqTestUtils.TestConsumer consumer = new RabbitMqTestUtils.TestConsumer(channel);
      channel.basicConsume(queueName, true, consumer);

      p.run();

      // Poll until the consumer has drained every published record.
      while (consumer.getReceived().size() < maxNumRecords) {
        Thread.sleep(500);
      }

      assertEquals(maxNumRecords, consumer.getReceived().size());
      for (int i = 0; i < maxNumRecords; i++) {
        assertTrue(consumer.getReceived().contains("Test " + i));
      }
    } finally {
      if (channel != null) {
        channel.close();
      }
      if (connection != null) {
        connection.close();
      }
    }
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.resourcegroups;

import com.google.inject.Injector;
import io.airlift.bootstrap.Bootstrap;
import io.airlift.json.JsonModule;
import io.trino.spi.memory.ClusterMemoryPoolManager;
import io.trino.spi.resourcegroups.ResourceGroupConfigurationManager;
import io.trino.spi.resourcegroups.ResourceGroupConfigurationManagerContext;
import io.trino.spi.resourcegroups.ResourceGroupConfigurationManagerFactory;

import java.util.Map;

/**
 * Factory for the "file"-based resource group configuration manager.
 * Wires up {@link FileResourceGroupConfigurationManager} through an Airlift
 * {@link Bootstrap}/Guice injector.
 */
public class FileResourceGroupConfigurationManagerFactory
        implements ResourceGroupConfigurationManagerFactory
{
    @Override
    public String getName()
    {
        // Identifier used to select this factory.
        return "file";
    }

    @Override
    public ResourceGroupConfigurationManager<?> create(Map<String, String> config, ResourceGroupConfigurationManagerContext context)
    {
        // Build a small injector containing only what the file-based manager
        // needs: JSON support, the module's own bindings, and the engine's
        // memory pool manager taken from the provided context.
        Bootstrap app = new Bootstrap(
                new JsonModule(),
                new FileResourceGroupsModule(),
                binder -> binder.bind(ClusterMemoryPoolManager.class).toInstance(context.getMemoryPoolManager()));

        Injector injector = app
                .doNotInitializeLogging()
                .setRequiredConfigurationProperties(config)
                .initialize();

        return injector.getInstance(FileResourceGroupConfigurationManager.class);
    }
}
// Singleton that reports UMA metrics about plugins that are missing on the
// user's machine. Plugins are identified either by MIME type or, failing
// that, by the file extension of their src URL.
class MissingPluginReporter {
 public:
  // This must be sync'd with histogram values.
  enum PluginType {
    WINDOWS_MEDIA_PLAYER = 0,
    SILVERLIGHT = 1,
    REALPLAYER = 2,
    JAVA = 3,
    QUICKTIME = 4,
    OTHER = 5
  };

  // Sends UMA data, i.e. plugin's type.
  class UMASender {
   public:
    virtual ~UMASender() {}
    virtual void SendPluginUMA(PluginType plugin_type) = 0;
  };

  // Returns singleton instance.
  static MissingPluginReporter* GetInstance();

  // Records the missing plugin identified by |plugin_mime_type| and/or
  // |plugin_src| via the configured UMASender.
  void ReportPluginMissing(std::string plugin_mime_type,
                           const GURL& plugin_src);

  // Used in testing. Replaces the default report sender (takes ownership).
  void SetUMASender(UMASender* sender);

 private:
  // Constructed lazily through GetInstance(); hence private ctor/dtor.
  friend struct DefaultSingletonTraits<MissingPluginReporter>;

  MissingPluginReporter();
  ~MissingPluginReporter();

  // Case-insensitive-style helper used for plugin name/type matching.
  static bool CompareCStrings(const char* first, const char* second);
  bool CStringArrayContainsCString(const char** array,
                                   size_t array_size,
                                   const char* str);
  // Extracts file extension from url.
  void ExtractFileExtension(const GURL& src, std::string* extension);

  // Converts plugin's src to plugin type.
  PluginType SrcToPluginType(const GURL& src);
  // Converts plugin's mime type to plugin type.
  PluginType MimeTypeToPluginType(const std::string& mime_type);

  // Owned sender used to emit the UMA sample; replaceable via SetUMASender.
  scoped_ptr<UMASender> report_sender_;

  DISALLOW_COPY_AND_ASSIGN(MissingPluginReporter);
};

#endif  // CHROME_RENDERER_PLUGINS_PLUGIN_UMA_H_
/**
 * Reads the `state` property from a History object.
 * IE11 throws when the state is accessed before any state has been set,
 * so the read is guarded and `null` is returned in that case.
 *
 * @param {!History} history
 * @return {*}
 */
export function getState(history) {
  let state = null;
  try {
    state = history.state;
  } catch (unusedError) {
    // IE11: no state available yet — report null instead of throwing.
  }
  return state;
}
# VMOMI binding for the type VirtualMachineMemoryAllocationPolicy.
# All behavior is inherited from VMOMI::SimpleType; this package only
# declares the type's name. (Presumably an enum-like vSphere API value
# type -- confirm against the VMOMI::SimpleType implementation.)
package VMOMI::VirtualMachineMemoryAllocationPolicy;
use parent 'VMOMI::SimpleType';

use strict;
use warnings;

# A Perl module must end by returning a true value.
1;
//=================================================================================================
//=================================================================================================


//*************************************************************************************************
// Includes
//*************************************************************************************************

#include <cstdlib>
#include <iostream>
#include <blaze/math/CompressedMatrix.h>
#include <blaze/math/DynamicMatrix.h>
#include <blaze/math/SymmetricMatrix.h>
#include <blaze/math/UpperMatrix.h>
#include <blazetest/mathtest/Creator.h>
#include <blazetest/mathtest/dmatsmatmult/OperationTest.h>
#include <blazetest/system/MathTest.h>


//=================================================================================================
//
//  MAIN FUNCTION
//
//=================================================================================================

//*************************************************************************************************
// Test driver for the dense-matrix/sparse-matrix multiplication of a symmetric
// dense matrix (SDa) with an upper compressed matrix (UCa); hence the name
// 'SDaUCa'. Each RUN_DMATSMATMULT_OPERATION_TEST invocation exercises one
// operand-size combination via the blazetest Creator instances.
int main()
{
   std::cout << "   Running 'SDaUCa'..." << std::endl;

   using blazetest::mathtest::TypeA;

   try
   {
      // Matrix type definitions
      typedef blaze::SymmetricMatrix< blaze::DynamicMatrix<TypeA> >  SDa;
      typedef blaze::UpperMatrix< blaze::CompressedMatrix<TypeA> >  UCa;

      // Creator type definitions
      typedef blazetest::Creator<SDa>  CSDa;
      typedef blazetest::Creator<UCa>  CUCa;

      // Running tests with small matrices: all sizes up to 6x6 and every
      // feasible number of non-zeros for the sparse operand.
      for( size_t i=0UL; i<=6UL; ++i ) {
         for( size_t j=0UL; j<=UCa::maxNonZeros( i ); ++j ) {
            RUN_DMATSMATMULT_OPERATION_TEST( CSDa( i ), CUCa( i, j ) );
         }
      }

      // Running tests with large matrices (odd and power-of-two sizes).
      RUN_DMATSMATMULT_OPERATION_TEST( CSDa(  31UL ), CUCa(  31UL,  7UL ) );
      RUN_DMATSMATMULT_OPERATION_TEST( CSDa(  67UL ), CUCa(  67UL,  7UL ) );
      RUN_DMATSMATMULT_OPERATION_TEST( CSDa( 127UL ), CUCa( 127UL, 13UL ) );
      RUN_DMATSMATMULT_OPERATION_TEST( CSDa(  32UL ), CUCa(  32UL,  8UL ) );
      RUN_DMATSMATMULT_OPERATION_TEST( CSDa(  64UL ), CUCa(  64UL,  8UL ) );
      RUN_DMATSMATMULT_OPERATION_TEST( CSDa( 128UL ), CUCa( 128UL, 16UL ) );
   }
   catch( std::exception& ex ) {
      std::cerr << "\n\n ERROR DETECTED during dense matrix/sparse matrix multiplication:\n"
                << ex.what() << "\n";
      return EXIT_FAILURE;
   }

   return EXIT_SUCCESS;
}
//*************************************************************************************************
"use strict";

// NOTE(review): this file looks machine-generated (webidl2js-style wrapper);
// prefer regenerating over hand-editing.
const conversions = require("webidl-conversions");
const utils = require("./utils.js");

const HTMLElement = require("./HTMLElement.js");
const impl = utils.implSymbol;

// Wrapper-class constructor: never directly constructible from user code.
function HTMLProgressElement() {
  throw new TypeError("Illegal constructor");
}
HTMLProgressElement.prototype = Object.create(HTMLElement.interface.prototype);
HTMLProgressElement.prototype.constructor = HTMLProgressElement;

HTMLProgressElement.prototype.toString = function () {
  if (this === HTMLProgressElement.prototype) {
    return "[object HTMLProgressElementPrototype]";
  }
  return HTMLElement.interface.prototype.toString.call(this);
};

const iface = {
  mixedInto: [],
  // True when obj is a wrapper for this interface (or for a mixed-in one).
  is(obj) {
    if (obj) {
      if (obj[impl] instanceof Impl.implementation) {
        return true;
      }
      for (let i = 0; i < module.exports.mixedInto.length; ++i) {
        if (obj instanceof module.exports.mixedInto[i]) {
          return true;
        }
      }
    }
    return false;
  },
  // True when obj is an implementation-class instance for this interface.
  isImpl(obj) {
    if (obj) {
      if (obj instanceof Impl.implementation) {
        return true;
      }

      const wrapper = utils.wrapperForImpl(obj);
      for (let i = 0; i < module.exports.mixedInto.length; ++i) {
        if (wrapper instanceof module.exports.mixedInto[i]) {
          return true;
        }
      }
    }
    return false;
  },
  // Creates a wrapper object and returns the wrapper.
  create(constructorArgs, privateData) {
    let obj = Object.create(HTMLProgressElement.prototype);
    this.setup(obj, constructorArgs, privateData);
    return obj;
  },
  // Creates a wrapper object but returns the underlying implementation.
  createImpl(constructorArgs, privateData) {
    let obj = Object.create(HTMLProgressElement.prototype);
    this.setup(obj, constructorArgs, privateData);
    return utils.implForWrapper(obj);
  },
  _internalSetup(obj) {
    HTMLElement._internalSetup(obj);
  },
  // Links wrapper and implementation objects to each other.
  setup(obj, constructorArgs, privateData) {
    if (!privateData) privateData = {};
    privateData.wrapper = obj;

    this._internalSetup(obj);

    obj[impl] = new Impl.implementation(constructorArgs, privateData);
    obj[impl][utils.wrapperSymbol] = obj;
  },
  interface: HTMLProgressElement,
  expose: {
    Window: { HTMLProgressElement: HTMLProgressElement }
  }
};
module.exports = iface;

// Required after module.exports is assigned -- presumably to tolerate a
// circular wrapper <-> impl dependency; confirm before reordering.
const Impl = require("../nodes/HTMLProgressElement-impl.js");
<?php

/**
 * Command class is a base class shared among all the command executable with
 * Doctrine\OrientDB's SQL synthax.
 *
 * @package    Doctrine\OrientDB
 * @subpackage Query
 * @author     Alessandro Nadalin <alessandro.nadalin@gmail.com>
 */

namespace Doctrine\OrientDB\Query;

use Doctrine\OrientDB\Exception;
use Doctrine\OrientDB\LogicException;
use Doctrine\OrientDB\Query\Formatter\QueryInterface as QueryFormatterInterface;
use Doctrine\OrientDB\Query\Formatter\Query as Formatter;
use Doctrine\OrientDB\Query\Validator\Rid as RidValidator;
use Doctrine\OrientDB\Query\Validator\Escaper as EscapeValidator;

abstract class Command implements CommandInterface
{
    // Validates record ids (RIDs) used in WHERE values.
    protected $ridValidator;
    // Escapes string values interpolated into conditions.
    protected $escapeValidator;
    // Lazily-created query formatter; see getFormatter().
    protected $formatter;
    protected $formatters = array();
    // Map of ":Token" => array of values, derived from the schema.
    protected $tokens = array();

    /**
     * These are the valid return types for commands
     */
    const RETURN_COUNT = 'COUNT';
    const RETURN_BEFORE = 'BEFORE';
    const RETURN_AFTER = 'AFTER';

    /**
     * Builds a new object, creating the SQL statement from the class SCHEMA
     * and initializing the tokens.
     */
    public function __construct()
    {
        $this->tokens = $this->getTokens();
        $this->ridValidator = new RidValidator();
        $this->escapeValidator = new EscapeValidator();
    }

    /**
     * Returns the schema template for the command.
     * Subclasses override this with a string containing ":Token" markers.
     */
    protected function getSchema()
    {
        return null;
    }

    /**
     * Sets a where token using the AND operator.
     * If the $condition contains a "?", it will be replaced by the $value.
     *
     * @param  string $condition
     * @param  string $value
     * @return Command
     */
    public function andWhere($condition, $value = null)
    {
        return $this->where($condition, $value, true, "AND");
    }

    /**
     * Sets the token for the from clause. You can $append your values.
     *
     * @param array   $target
     * @param boolean $append
     */
    public function from(array $target, $append = true)
    {
        // NOTE(review): lowercase "v" in setTokenvalues -- works because PHP
        // method names are case-insensitive, but it diverges from the
        // setTokenValues spelling used everywhere else.
        $this->setTokenvalues('Target', $target, $append);

        return $this;
    }

    /**
     * Returns the raw SQL query incapsulated by the current object.
     *
     * @return string
     */
    public function getRaw()
    {
        return $this->getValidStatement();
    }

    /**
     * Analyzing the command's SCHEMA, this method returns all the tokens
     * allocable in the command.
     *
     * @return array
     */
    public function getTokens()
    {
        // Tokens are ":Word" markers embedded in the schema template.
        preg_match_all("/(\:\w+)/", $this->getSchema(), $matches);
        $tokens = array();

        foreach ($matches[0] as $match) {
            $tokens[$match] = array();
        }

        return $tokens;
    }

    /**
     * Returns the value of a token.
     *
     * @param  string $token
     * @return mixed
     */
    public function getTokenValue($token)
    {
        return $this->checkToken($this->tokenize($token));
    }

    /**
     * Sets a where token using the OR operator.
     * If the $condition contains a "?", it will be replaced by the $value.
     *
     * @param  string $condition
     * @param  string $value
     * @return Command
     */
    public function orWhere($condition, $value = null)
    {
        return $this->where($condition, $value, true, "OR");
    }

    /**
     * Deletes all the WHERE conditions in the current command.
     *
     * @return true
     */
    public function resetWhere()
    {
        $this->clearToken('Where');

        return true;
    }

    /**
     * Sets the internal query formatter object.
     *
     * @param QueryFormatterInterface $formatter
     */
    public function setFormatter(QueryFormatterInterface $formatter)
    {
        $this->formatter = $formatter;
    }

    /**
     * Adds a WHERE conditions into the current query.
     *
     * @param string  $condition
     * @param mixed   $value
     * @param boolean $append
     * @param string  $clause
     * @return Command
     */
    public function where($condition, $value = null, $append = false, $clause = "WHERE")
    {
        if (is_array($value)) {
            // Multiple "?" placeholders, one per array element.
            $condition = $this->formatWhereConditionWithMultipleTokens($condition, $value, $this->escapeValidator);
        } else {
            if ($value === null) {
                // "= ?" becomes "IS NULL".
                $condition = preg_replace("/=\s*\?/", "IS ?", $condition, 1);
                $value = 'NULL';
            } else if (is_bool($value)) {
                $value = $value ? 'TRUE' : 'FALSE';
            } else if (is_int($value) || is_float($value)) {
                // Preserve $value as is
            } else {
                // Strings: either a valid RID (kept bare) or an escaped,
                // double-quoted literal.
                $rid = $this->ridValidator->check($value, true);
                $value = $rid ? $rid : '"' . $this->escapeValidator->check($value, true) . '"';
            }

            $condition = str_replace("?", $value, $condition);
        }

        // The first condition always starts with WHERE, regardless of the
        // AND/OR clause requested.
        if (!$this->getTokenValue('Where')) {
            $clause = 'WHERE';
        }

        // NOTE(review): setTokenValues() takes 4 parameters; the trailing
        // fifth argument here is silently ignored by PHP.
        $this->setTokenValues('Where', array("{$clause} $condition"), $append, false, false);

        return $this;
    }

    /**
     * Returns whether this query, when executed, should have the collection hydrated.
     * The default is true
     *
     * @return boolean
     */
    public function canHydrate()
    {
        return true;
    }

    /**
     * Sets the Returns token
     *
     * @param string $returns one of the values from getValidReturnTypes()
     * @throws LogicException when the clause is unsupported or the type unknown
     */
    public function returns($returns)
    {
        //check if the Return clause is even supported
        $returnTypes = $this->getValidReturnTypes();
        if (count($returnTypes) <= 0) {
            throw new LogicException("Return clause not supported for this statement");
        }

        $returns = strtoupper($returns);

        if (!in_array($returns, $returnTypes)) {
            throw new LogicException(sprintf("Unknown return type %s", $returns));
        }

        $this->setToken('Returns', $returns);
    }

    /**
     * Returns the array of valid params for the Return clause.
     * Use this function to support the Return clause by overriding and returing a list in the subclass
     *
     * @return array()
     */
    public function getValidReturnTypes()
    {
        return array();
    }

    /**
     * Appends a token to the query, without deleting existing values for the
     * given $token.
     *
     * @param string  $token
     * @param mixed   $values
     * @param boolean $first  prepend instead of append
     */
    protected function appendToken($token, $values, $first = false)
    {
        foreach ($values as $key => $value) {
            if ($first) {
                array_unshift($this->tokens[$token], $value);
            } else {
                // Dispatch on the key's type: "appendTokenAsString" or
                // "appendTokenAsInteger".
                $method = "appendTokenAs" . ucfirst(gettype($key));
                $this->$method($token, $key, $value);
            }
        }
    }

    /**
     * Appends $value to the query $token, using $key to identify the $value in
     * the token array.
     * With this method you set a token value and can retrieve it by its key.
     *
     * @param string $token
     * @param string $key
     * @param mixed  $value
     */
    protected function appendTokenAsString($token, $key, $value)
    {
        $this->tokens[$token][$key] = $value;
    }

    /**
     * Appends $value to the query $token.
     *
     * @param string $token
     * @param string $key   unused -- integer keys are discarded and the value
     *                      is pushed onto the end of the token array
     * @param mixed  $value
     */
    protected function appendTokenAsInteger($token, $key, $value)
    {
        $this->tokens[$token][] = $value;
    }

    /**
     * Checks if a token is set, returning it if it is.
     *
     * @param  string $token
     * @return mixed
     * @throws TokenNotFoundException
     */
    protected function checkToken($token)
    {
        if (!array_key_exists($token, $this->tokens)) {
            throw new TokenNotFoundException($token, get_called_class());
        }

        return $this->tokens[$token];
    }

    /**
     * Clears the value of a token.
     *
     * @param string $token
     */
    protected function clearToken($token)
    {
        $token = $this->tokenize($token);
        $this->checkToken($token);
        $this->tokens[$token] = array();
    }

    /**
     * Returns a brand new instance of a Formatter in order to format query
     * tokens.
     *
     * @return QueryFormatterInterface
     */
    protected function getFormatter()
    {
        return $this->formatter ?: new Formatter();
    }

    /**
     * Returns the formatters for this query's tokens.
     *
     * @return Array
     */
    protected function getTokenFormatters()
    {
        return array(
            'Target'   => "Doctrine\OrientDB\Query\Formatter\Query\Target",
            'Where'    => "Doctrine\OrientDB\Query\Formatter\Query\Where",
            'Class'    => "Doctrine\OrientDB\Query\Formatter\Query\Regular",
            'Property' => "Doctrine\OrientDB\Query\Formatter\Query\Regular",
            'Type'     => "Doctrine\OrientDB\Query\Formatter\Query\Regular",
            'Rid'      => "Doctrine\OrientDB\Query\Formatter\Query\Rid"
        );
    }

    /**
     * Returns the formatter for a particular token.
     *
     * @param  string $token
     * @return Array
     * @throws Exception when no formatter is registered for the token
     */
    protected function getTokenFormatter($token)
    {
        $formatters = $this->getTokenFormatters();

        if (!array_key_exists($token, $formatters)) {
            $message = "The class %s does not know how to format the %s token\n" .
                       "Have you added it in the getTokenFormatters() method?";
            throw new Exception(sprintf($message, get_called_class(), $token));
        }

        return $formatters[$token];
    }

    /**
     * Returns the values to replace command's schema tokens.
     *
     * @return array
     */
    protected function getTokenReplaces()
    {
        $replaces = array();

        foreach ($this->tokens as $token => $value) {
            $key = $this->getFormatter()->untokenize($token);
            $formatter = $this->getTokenFormatter($key);
            $replaces[$token] = $formatter::format($value);
        }

        return $replaces;
    }

    /**
     * Build the command replacing schema tokens with actual values and cleaning
     * the command synthax.
     *
     * @return string
     */
    protected function getValidStatement()
    {
        $schema = $this->getSchema();
        $statement = $this->replaceTokens($schema);
        // Collapse runs of spaces left behind by empty tokens.
        $statement = preg_replace('/( ){2,}/', ' ', $statement);

        return trim($statement);
    }

    /**
     * Substitutes multiple tokens ($values) in the WHERE $condition.
     *
     * @param  string $condition
     * @param  array  $values
     * @return string
     * @throws LogicException when placeholder and value counts differ
     */
    protected function formatWhereConditionWithMultipleTokens(
        $condition,
        Array $values,
        EscapeValidator $validator
    ) {
        if (count($values) !== substr_count($condition, '?')) {
            throw new LogicException("Number of given parameters does not match number of tokens");
        }

        // Replace one "?" at a time, left to right.
        foreach ($values as $replacement) {
            $condition = preg_replace("/\?/", '"' . $validator->check($replacement, 1) . '"', $condition, 1);
        }

        return $condition;
    }

    /**
     * Replaces the tokens in the command's schema with their actual values in
     * the current object.
     *
     * @param  string $statement
     * @return string
     */
    protected function replaceTokens($statement)
    {
        $replaces = $this->getTokenReplaces();

        return str_replace(array_keys($replaces), $replaces, $statement);
    }

    /**
     * Sets a single value for a token,
     *
     * @param  string  $token
     * @param  string  $tokenValue
     * @param  boolean $append
     * @param  boolean $first
     * @return true
     */
    public function setToken($token, $tokenValue, $append = false, $first = false)
    {
        return $this->setTokenValues($token, array($tokenValue), $append, $first);
    }

    /**
     * Sets the values of a token, and can be appended with the given $append.
     *
     * @param  string  $token
     * @param  array   $tokenValues
     * @param  boolean $append
     * @param  boolean $first
     * @return true
     */
    protected function setTokenValues($token, array $tokenValues, $append = true, $first = false)
    {
        $token = $this->tokenize($token);
        $this->checkToken($token);

        if (is_array($this->tokens[$token]) && is_array($tokenValues)) {
            if ($append) {
                $this->appendToken($token, $tokenValues, $first);
            } else {
                $this->unsetToken($token);
                $this->tokens[$token] = $tokenValues;
            }
        }

        return true;
    }

    /**
     * Deletes a token.
     *
     * @param string $token
     */
    protected function unsetToken($token)
    {
        unset($this->tokens[$token]);
    }

    /**
     * Tokenizes a string.
     *
     * @param  string $token
     * @return string
     */
    protected function tokenize($token)
    {
        return $this->getFormatter()->tokenize($token);
    }
}
# ----------------------------------------------------------------------------------
#
# Copyright Microsoft Corporation
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------------

########################################################################### General TrafficManager Scenario Tests ###########################################################################

<#
.SYNOPSIS
Tests any cloud based cmdlet with invalid credentials and expect it'll throw an exception.
#>
function Test-WithInvalidCredentials
{
    param([ScriptBlock] $cloudCmdlet)

    # Setup
    Remove-AllSubscriptions

    # Test
    Assert-Throws $cloudCmdlet "No current subscription has been designated. Use Select-AzureSubscription -Current <subscriptionName> to set the current subscription."
}

########################################################################### Remove-Profile Scenario Tests ###########################################################################

<#
.SYNOPSIS
Tests New-AzureTrafficManagerProfile and Remove-AzureTrafficManagerProfile
#>
function Test-CreateAndRemoveProfile
{
    # Setup
    $profileName = Get-ProfileName
    New-Profile $profileName

    # Test
    $isDeleted = Remove-AzureTrafficManagerProfile -Name $profileName -Force -PassThru

    # Assert
    Assert-True { $isDeleted } "Failed to delete profile $profileName"
    Assert-Throws { Get-AzureTrafficManagerProfile -Name $profileName } "ResourceNotFound: The specified profile name $profileName does not exist."
}

<#
.SYNOPSIS
Tests Remove-AzureTrafficManagerProfile with non existing name
#>
function Test-RemoveProfileWithNonExistingName
{
    # Setup
    $existingProfileName = Get-ProfileName
    $nonExistingProfileName = Get-ProfileName "nonexisting"
    # Need to have at least one profile in the subscription or the error will be "missing subscription"
    New-Profile $existingProfileName

    # Assert
    Assert-Throws { Remove-AzureTrafficManagerProfile -Name $nonExistingProfileName -Force } "ResourceNotFound: The specified profile name $nonExistingProfileName does not exist."
}

########################################################################### Get-Profile Scenario Tests ###########################################################################

<#
.SYNOPSIS
Tests Get-AzureTrafficManagerProfile <name>
#>
function Test-GetProfile
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName

    # Test
    $retrievedProfile = Get-AzureTrafficManagerProfile $profileName

    # Assert
    Assert-AreEqualObjectProperties $createdProfile $retrievedProfile
}

<#
.SYNOPSIS
Tests Get-AzureTrafficManagerProfile
#>
function Test-GetMultipleProfiles
{
    # Setup
    $profileName1 = Get-ProfileName 1
    $profileName2 = Get-ProfileName 2
    $createdProfile1 = New-Profile $profileName1
    $createdProfile2 = New-Profile $profileName2

    # Test
    $retrievedProfiles = Get-AzureTrafficManagerProfile

    # Assert
    Assert-True { $($retrievedProfiles | select -ExpandProperty Name) -Contains $profileName1 } "Assert failed, profile '$profileName1' not found"
    Assert-True { $($retrievedProfiles | select -ExpandProperty Name) -Contains $profileName2 } "Assert failed, profile '$profileName2' not found"
}

########################################################################### Enable-Profile, Disable-Profile Scenario Tests ###########################################################################

<#
.SYNOPSIS
Tests Disable-AzureTrafficManagerProfile
#>
function Test-DisableProfile
{
    # Setup
    $profileName = Get-ProfileName
    New-Profile $profileName

    # Test
    Disable-AzureTrafficManagerProfile $profileName
    $disabledProfile = Get-AzureTrafficManagerProfile -Name $profileName

    # Assert
    Assert-AreEqual "Disabled" $disabledProfile.Status
}

<#
.SYNOPSIS
Tests Enable-AzureTrafficManagerProfile
#>
function Test-EnableProfile
{
    # Setup
    $profileName = Get-ProfileName
    New-Profile $profileName

    # Test
    Disable-AzureTrafficManagerProfile $profileName
    Enable-AzureTrafficManagerProfile $profileName
    $enabledProfile = Get-AzureTrafficManagerProfile -Name $profileName

    # Assert
    Assert-AreEqual "Enabled" $enabledProfile.Status
}

########################################################################### New-Profile Scenario Tests ###########################################################################

<#
.SYNOPSIS
Tests New-AzureTrafficManagerProfile
#>
function Test-NewProfile
{
    # Setup
    $profileName = Get-ProfileName

    # Test
    $createdProfile = New-Profile $profileName

    # Assert: verify every default the helper applies when creating a profile.
    Assert-AreEqual $($profileName + ".trafficmanager.net") $createdProfile.DomainName
    Assert-AreEqual $profileName $createdProfile.Name
    Assert-AreEqual RoundRobin $createdProfile.LoadBalancingMethod
    Assert-AreEqual 80 $createdProfile.MonitorPort
    Assert-AreEqual Http $createdProfile.MonitorProtocol
    Assert-AreEqual "/" $createdProfile.MonitorRelativePath
    Assert-AreEqual 300 $createdProfile.TimeToLiveInSeconds
    Assert-AreEqual "Enabled" $createdProfile.Status
    Assert-AreEqual "Inactive" $createdProfile.MonitorStatus
}

<#
.SYNOPSIS
Tests New-AzureTrafficManagerProfile with invalid parameter
#>
function Test-NewProfileWithInvalidParameter
{
    # Setup
    $profileName = Get-ProfileName

    # Assert
    # NOTE(review): $expectedMessage is assigned but never passed to
    # Assert-Throws below, so the exception text is not actually verified.
    $expectedMessage = "A policy with the requested domain name could not be created because the name INVALID does not end with the expected value .trafficmanager.net."
    Assert-Throws { New-AzureTrafficManagerProfile -Name $profileName -DomainName "INVALID" -LoadBalancingMethod RoundRobin -MonitorPort 80 -MonitorProtocol Http -MonitorRelativePath "/" -Ttl 300 }
}

########################################################################### Set-Profile Scenario Tests ###########################################################################

<#
.SYNOPSIS
Tests Set-AzureTrafficManagerProfile
#>
function Test-SetProfileProperty
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName

    # Test
    $updatedProfile = Set-AzureTrafficManagerProfile -TrafficManagerProfile $createdProfile -Name $createdProfile.Name -Ttl 333

    # Assert
    Assert-AreEqual 333 $updatedProfile.TimeToLiveInSeconds
}

<#
.SYNOPSIS
Tests Add-AzureTrafficManagerEndpoint
#>
function Test-AddAzureTrafficManagerEndpoint
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName

    #Test
    $updatedProfile = $createdProfile | Add-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled -Weight 3 -Location "West US" | Set-AzureTrafficManagerProfile

    # Assert
    # Monitoring may still be in progress, so both transitional and final
    # statuses are accepted.
    $profileMonitoringStatus = $updatedProfile.MonitorStatus
    $endpointMonitoringStatus = $updatedProfile.Endpoints[0].MonitorStatus

    Assert-AreEqual 1 $updatedProfile.Endpoints.Count
    Assert-True { $profileMonitoringStatus -eq "CheckingEndpoints" -or $profileMonitoringStatus -eq "Online" } "Assert failed as endpoint MonitoringStatus has an unexpected value: $profileMonitoringStatus"
    Assert-AreEqual Any $updatedProfile.Endpoints[0].Type
    Assert-AreEqual "www.microsoft.com" $updatedProfile.Endpoints[0].DomainName
    Assert-AreEqual Enabled $updatedProfile.Endpoints[0].Status
    Assert-AreEqual 3 $updatedProfile.Endpoints[0].Weight
    Assert-AreEqual "West US" $updatedProfile.Endpoints[0].Location
    Assert-True { $endpointMonitoringStatus -eq "CheckingEndpoint" -or $endpointMonitoringStatus -eq "Online" } "Assert failed as endpoint MonitoringStatus has an unexpected value: $endpointMonitoringStatus"
}

<#
.SYNOPSIS
Tests Add-AzureTrafficManagerEndpoint with no weight or location
#>
function Test-AddAzureTrafficManagerEndpointNoWeightLocation
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName

    #Test
    $updatedProfile = $createdProfile | Add-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled | Set-AzureTrafficManagerProfile

    # Assert
    $profileMonitoringStatus = $updatedProfile.MonitorStatus
    $endpointMonitoringStatus = $updatedProfile.Endpoints[0].MonitorStatus

    Assert-AreEqual 1 $updatedProfile.Endpoints.Count
    Assert-True { $profileMonitoringStatus -eq "CheckingEndpoints" -or $profileMonitoringStatus -eq "Online" } "Assert failed as endpoint MonitoringStatus has an unexpected value: $profileMonitoringStatus"
    Assert-AreEqual Any $updatedProfile.Endpoints[0].Type
    Assert-AreEqual "www.microsoft.com" $updatedProfile.Endpoints[0].DomainName
    Assert-AreEqual Enabled $updatedProfile.Endpoints[0].Status
    # Test for default values
    Assert-AreEqual 1 $updatedProfile.Endpoints[0].Weight
    Assert-Null $updatedProfile.Endpoints[0].Location
    Assert-True { $endpointMonitoringStatus -eq "CheckingEndpoint" -or $endpointMonitoringStatus -eq "Online" } "Assert failed as endpoint MonitoringStatus has an unexpected value: $endpointMonitoringStatus"
}

<#
.SYNOPSIS
Tests Add-AddAzureTrafficManagerEndpointNoWeightLocation with no MinChildEndpoints
#>
function Test-AddAzureTrafficManagerEndpointNoMinChildEndpoints
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName

    #Test
    $updatedProfile = $createdProfile | Add-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 1 $updatedProfile.Endpoints.Count
    Assert-AreEqual $null $updatedProfile.Endpoints[0].MinChildEndpoints
}

<#
.SYNOPSIS
Tests Add-AddAzureTrafficManagerEndpointTypeTrafficManager
#>
function Test-AddAzureTrafficManagerEndpointTypeTrafficManager
{
    # Setup: a nested profile used as a TrafficManager-type endpoint of a
    # top-level profile.
    $nestedProfileName = Get-ProfileName
    $nestedDomainName = $nestedProfileName + $TrafficManagerDomain
    $randomValue = Get-Random -Maximum 10000
    $topLevelProfileName = "toplevelprofile" + $randomValue

    $nestedProfile = New-Profile $nestedProfileName
    $topLevelProfile = New-Profile $topLevelProfileName

    #Test
    $updatedNestedProfile = $nestedProfile | Add-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled | Set-AzureTrafficManagerProfile
    $updatedTopLevelProfile = $topLevelProfile | Add-AzureTrafficManagerEndpoint -DomainName $nestedDomainName -Type TrafficManager -Status Enabled | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 1 $updatedNestedProfile.Endpoints.Count
    Assert-AreEqual Any $updatedNestedProfile.Endpoints[0].Type
    Assert-AreEqual 1 $updatedTopLevelProfile.Endpoints.Count
    Assert-AreEqual TrafficManager $updatedTopLevelProfile.Endpoints[0].Type

    Remove-AzureTrafficManagerProfile -Name $topLevelProfileName -Force -PassThru
}

<#
.SYNOPSIS
Tests Set-AzureTrafficManagerEndpoint not updating Weight or Location
#>
function Test-SetAzureTrafficManagerEndpoint
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName | Add-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled -Weight 3 -Location "West US" | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 3 $createdProfile.Endpoints[0].Weight
    Assert-AreEqual "West US" $createdProfile.Endpoints[0].Location

    # Test
    $updatedProfile = $createdProfile | Set-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Status Disabled | Set-AzureTrafficManagerProfile

    # Assert: Weight/Location were not passed and must be preserved.
    Assert-AreEqual 1 $updatedProfile.Endpoints.Count
    Assert-AreEqual "www.microsoft.com" $updatedProfile.Endpoints[0].DomainName
    Assert-AreEqual Disabled $updatedProfile.Endpoints[0].Status
    Assert-AreEqual 3 $updatedProfile.Endpoints[0].Weight
    Assert-AreEqual "West US" $updatedProfile.Endpoints[0].Location
}

<#
.SYNOPSIS
Tests Set-AzureTrafficManagerEndpoint updating Weight and Location
#>
function Test-SetAzureTrafficManagerEndpointUpdateWeightLocation
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName | Add-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 1 $createdProfile.Endpoints[0].Weight
    Assert-Null $createdProfile.Endpoints[0].Location

    #Test
    $updatedProfile = $createdProfile | Set-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Status Disabled -Weight 3 -Location "West US" | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 1 $updatedProfile.Endpoints.Count
    Assert-AreEqual "www.microsoft.com" $updatedProfile.Endpoints[0].DomainName
    Assert-AreEqual Disabled $updatedProfile.Endpoints[0].Status
    Assert-AreEqual 3 $updatedProfile.Endpoints[0].Weight
    Assert-AreEqual "West US" $updatedProfile.Endpoints[0].Location
}

<#
.SYNOPSIS
Tests Set-AzureTrafficManagerEndpoint when it adds endpoints
#>
function Test-SetAzureTrafficManagerEndpointAdds
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName

    $createdProfile = $createdProfile | Set-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled | Set-AzureTrafficManagerEndpoint -DomainName "www.windows.com" -Type Any -Status Enabled | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 2 $createdProfile.Endpoints.Count
    Assert-True { $($createdProfile.Endpoints | select -ExpandProperty DomainName) -Contains "www.microsoft.com" } "Assert failed, endpoint 'www.microsoft.com' not found"
    # NOTE(review): the failure message below says 'www.microsoft.com' but the
    # assertion checks 'www.windows.com' (copy-paste in the message string).
    Assert-True { $($createdProfile.Endpoints | select -ExpandProperty DomainName) -Contains "www.windows.com" } "Assert failed, endpoint 'www.microsoft.com' not found"

    # Test
    $updatedProfile = $createdProfile | Set-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Status Disabled | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 2 $updatedProfile.Endpoints.Count
    Assert-True { $($updatedProfile.Endpoints | select -ExpandProperty DomainName) -Contains "www.microsoft.com" } "Assert failed, endpoint 'www.microsoft.com' not found"
    Assert-AreEqual Disabled $($updatedProfile.Endpoints | where {$_.DomainName -eq "www.microsoft.com"}).Status
}

<#
.SYNOPSIS
Tests Remove-AzureTrafficManagerEndpoint
#>
function Test-RemoveAzureTrafficManagerEndpoint
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName | Add-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled | Set-AzureTrafficManagerProfile

    #Test
    $updatedProfile = $createdProfile | Remove-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 0 $updatedProfile.Endpoints.Count
}

<#
.SYNOPSIS
Tests multiple Add-AzureTrafficManagerEndpoint
#>
function Test-AddMultipleAzureTrafficManagerEndpoint
{
    # Setup
    $profileName = Get-ProfileName
    $createdProfile = New-Profile $profileName | Add-AzureTrafficManagerEndpoint -DomainName "www.microsoft.com" -Type Any -Status Enabled
    $createdProfile = $createdProfile | Add-AzureTrafficManagerEndpoint -DomainName "www.bing.com" -Type Any -Status Enabled

    #Test
    $updatedProfile = $createdProfile | Set-AzureTrafficManagerProfile

    # Assert
    Assert-AreEqual 2 $updatedProfile.Endpoints.Count
}

########################################################################### Test-TrafficManagerDomainName Scenario Tests ###########################################################################

<#
.SYNOPSIS
Tests Test-AzureTrafficManagerDomainName
#>
function Test-TestAzureTrafficManagerDomainName
{
    $profileName = Get-ProfileName

    # Domain should be reported available before the profile exists...
    Assert-True { Test-AzureTrafficManagerDomainName -DomainName $profileName$TrafficManagerDomain } "Assert failed, domain name $profileName$TrafficManagerDomain is not available"

    $createdProfile = New-Profile $profileName

    # ...and unavailable once a profile claims it.
    Assert-False { Test-AzureTrafficManagerDomainName -DomainName $profileName$TrafficManagerDomain } "Assert failed, domain name $profileName$TrafficManagerDomain is available after creating a profile with that name"
}
package org.jasig.cas.authentication.handler.support;

import org.jasig.cas.TestUtils;
import org.jasig.cas.authentication.handler.AuthenticationException;
import org.jasig.cas.authentication.handler.PlainTextPasswordEncoder;
import org.jasig.cas.authentication.handler.support.SimpleTestUsernamePasswordAuthenticationHandler;
import org.jasig.cas.authentication.principal.UsernamePasswordCredentials;

import junit.framework.TestCase;

/**
 * Test of the simple username/password handler: the handler authenticates any
 * credentials whose username equals the password (after password encoding).
 *
 * @author Scott Battaglia
 * @version $Revision: 14064 $ $Date: 2007-06-10 09:17:55 -0400 (Sun, 10 Jun 2007) $
 * @since 3.0
 */
public final class SimpleTestUsernamePasswordHandlerTests extends TestCase {

    // Handler under test; re-created for every test by setUp().
    private SimpleTestUsernamePasswordAuthenticationHandler authenticationHandler;

    protected void setUp() throws Exception {
        this.authenticationHandler = new SimpleTestUsernamePasswordAuthenticationHandler();
        // Plain-text encoder so username == password comparisons are literal.
        this.authenticationHandler
            .setPasswordEncoder(new PlainTextPasswordEncoder());
    }

    /** Username/password credentials are the supported credential type. */
    public void testSupportsProperUserCredentials() {
        assertTrue(this.authenticationHandler.supports(TestUtils
            .getCredentialsWithSameUsernameAndPassword()));
    }

    /** HTTP-based service credentials must be rejected by supports(). */
    public void testDoesntSupportBadUserCredentials() {
        assertFalse(this.authenticationHandler.supports(TestUtils
            .getHttpBasedServiceCredentials()));
    }

    /** Matching username and password authenticate successfully. */
    public void testValidUsernamePassword() throws AuthenticationException {
        assertTrue(this.authenticationHandler.authenticate(TestUtils
            .getCredentialsWithDifferentUsernameAndPassword() == null ? null : TestUtils
            .getCredentialsWithSameUsernameAndPassword()));
    }

    /** Mismatched username/password either returns false or throws. */
    public void testInvalidUsernamePassword() {
        try {
            assertFalse(this.authenticationHandler.authenticate(TestUtils
                .getCredentialsWithDifferentUsernameAndPassword()));
        } catch (AuthenticationException ae) {
            // this is okay
        }
    }

    /** Null username/password either returns false or throws. */
    public void testNullUsernamePassword() {
        try {
            assertFalse(this.authenticationHandler.authenticate(TestUtils
                .getCredentialsWithSameUsernameAndPassword(null)));
        } catch (AuthenticationException ae) {
            // this is okay
        }
    }

    /** The supported credential class is configurable. */
    public void testAlternateClass() {
        this.authenticationHandler.setClassToSupport(UsernamePasswordCredentials.class);
        assertTrue(this.authenticationHandler.supports(new UsernamePasswordCredentials()));
    }

    /** With subclass support on, a subclass of the configured class is accepted. */
    public void testAlternateClassWithSubclassSupport() {
        this.authenticationHandler.setClassToSupport(UsernamePasswordCredentials.class);
        this.authenticationHandler.setSupportSubClasses(true);
        assertTrue(this.authenticationHandler.supports(new ExtendedCredentials()));
    }

    /** With subclass support off, a subclass of the configured class is rejected. */
    public void testAlternateClassWithNoSubclassSupport() {
        this.authenticationHandler.setClassToSupport(UsernamePasswordCredentials.class);
        this.authenticationHandler.setSupportSubClasses(false);
        assertFalse(this.authenticationHandler.supports(new ExtendedCredentials()));
    }

    // Minimal subclass used only to exercise the subclass-support switch.
    protected class ExtendedCredentials extends UsernamePasswordCredentials {

        private static final long serialVersionUID = 406992293105518363L;
        // nothing to see here
    }
}
<?php
/**
 * Toolbox Module's TagManager
 *
 * Provides three template tags for the Ionize CMS: asset minification,
 * QR-code generation and reCAPTCHA rendering.
 *
 * @author : İskender TOTOĞLU | Altı ve Bir Bilişim Teknolojileri | http://www.altivebir.com.tr
 */
class Toolbox_Tags extends TagManager
{
    /**
     * Tags declaration
     * To be available, each tag must be declared in this static array.
     *
     * @var array
     *
     * @usage	<ion:toolbox:tag... />
     *
     */
    public static $tag_definitions = array
    (
        'toolbox:minify'    => 'tag_minify',
        'toolbox:qrcode'    => 'tag_qrcode',
        'toolbox:recaptcha' => 'tag_recaptcha'
    );

    // ------------------------------------------------------------------------

    /**
     * Base module tag: simply expands and returns the tag's inner content.
     *
     * @usage	<ion:toolbox />
     *
     */
    public static function index(FTL_Binding $tag)
    {
        $str = $tag->expand();
        return $str;
    }

    // ------------------------------------------------------------------------

    /**
     * Minify Tag
     *
     * !NOTE : Filenames and output filenames not need extention, write filenames without extentions..
     *
     * NOTE(review): the usage example below documents an "output_name" attribute,
     * but the code reads the attribute named "output" — confirm which is intended.
     *
     * @usage	<ion:toolbox:minify type="css|js" files="files, you, want, to, minify, with, file, extention" output_name="your_output_file_name" />
     *
     * @param  FTL_Binding $tag
     * @return mixed
     */
    public static function tag_minify(FTL_Binding $tag)
    {
        $type = $tag->getAttribute('type', NULL);
        $files = $tag->getAttribute('files', NULL);
        $outputName = $tag->getAttribute('output', 'template');
        // NOTE(review): this compares strtoupper(<string or FALSE>) != TRUE; any
        // non-empty attribute value (including "false") is loosely equal to TRUE,
        // so $url ends up TRUE whenever the attribute is set — verify intent.
        $url = ((strtoupper($tag->getAttribute('url', FALSE)) != TRUE) ? FALSE : TRUE);

        if( ! is_null($type) && ($type == 'css' || $type == 'js') && ! is_null($files) )
        {
            // Get Files as Array
            $files = explode(',', str_replace(', ', ',', $files));

            // Get Module Configs
            $moduleConfigs = Modules()->get_module_config("toolbox");

            // Resolve asset/build directories and absolute paths from module config.
            // NOTE(review): the css_* fallbacks differ from the js_* ones
            // ($assets_dir / $css_dir instead of '') — confirm this asymmetry is deliberate.
            $assets_dir = ($moduleConfigs['minify_assets_dir'] != '') ? Theme::get_theme_path() . $moduleConfigs['minify_assets_dir'] . DIRECTORY_SEPARATOR : '';
            $assets_path = DOCPATH . $assets_dir;
            $build_dir = ($moduleConfigs['minify_build_dir'] != '') ? $moduleConfigs['minify_build_dir'] . DIRECTORY_SEPARATOR : '';
            $js_dir = ($moduleConfigs['minify_js_dir'] != '') ? $moduleConfigs['minify_js_dir'] . DIRECTORY_SEPARATOR : '';
            $js_path = $assets_path . $js_dir;
            $js_build_dir = ($moduleConfigs['minify_js_dir'] != '') ? $moduleConfigs['minify_js_dir'] . DIRECTORY_SEPARATOR . $build_dir : $js_dir;
            $js_build_path = $js_path . $build_dir;
            $css_dir = ($moduleConfigs['minify_css_dir'] != '') ? $moduleConfigs['minify_css_dir'] . DIRECTORY_SEPARATOR : $assets_dir;
            $css_path = $assets_path . $css_dir;
            $css_build_dir = ($moduleConfigs['minify_css_dir'] != '') ? $moduleConfigs['minify_css_dir'] . DIRECTORY_SEPARATOR . $build_dir : $css_dir;
            $css_build_path = $css_path . $build_dir;

            // NOTE(review): $configs is not defined yet at this point, so this
            // unset() is a no-op.
            unset($configs);

            // Set Module Configs to Array
            $configs = array(
                'development'     => ($moduleConfigs['minify_development'] == '1') ? TRUE : FALSE,
                'compress'        => ($moduleConfigs['minify_compress'] == '1') ? TRUE : FALSE,
                'local_compiler'  => ($moduleConfigs['minify_local_compiler'] == '1') ? TRUE : FALSE,
                'assets_dir'      => $assets_dir,
                'assets_path'     => $assets_path,
                'js_dir'          => $js_dir,
                'js_path'         => $js_path,
                'js_build_dir'    => $js_build_dir,
                'js_build_path'   => $js_build_path,
                'css_dir'         => $css_dir,
                'css_path'        => $css_path,
                'css_build_dir'   => $css_build_dir,
                'css_build_path'  => $css_build_path,
                'type'            => $type,
                'files'           => $files,
                'output'          => $outputName . '.' . $type,
                'output_min'      => $outputName . '.min.' . $type,
                'url'             => $url
            );

            // Load Toolbox Model
            self::load_model('toolbox_model', '');

            // Check Files Log Missing files
            $configs = self::$ci->toolbox_model->check_files($configs, $type);

            /**
             * Compare file timestamps
             *
             * Return Changed Files
             *
             * @TODO Change "$configs['output_min']" minified file with normal file, compare not compressed files and compare
             */
            $compare_files = self::$ci->toolbox_model->check_filetime($configs[$type . '_build_path'], $files, $configs[$type . '_path'] . $configs['output_min']);

            // NOTE(review): these log_message('ERROR', ...) calls look like leftover
            // debug tracing at ERROR severity — consider removing or demoting.
            log_message('ERROR', '#compare_files :: ' . print_r($compare_files, TRUE));

            if( $type == 'js' && ! empty($configs['files']) )
            {
                if( $configs['development'] != TRUE && ! empty($compare_files) && $configs['compress'] == TRUE && $configs['local_compiler'] == TRUE && self::$ci->toolbox_model->_check_local_compiler() )
                {
                    // Production + changes + local Closure compiler available.
                    log_message('ERROR', 'JS:STEP::1');
                    return self::$ci->toolbox_model->local_closure_compiler($configs);
                }
                elseif( $configs['development'] != TRUE && ! empty($compare_files) && $configs['compress'] == TRUE && $configs['local_compiler'] == FALSE )
                {
                    // Production + changes, no local compiler: plain minify.
                    log_message('ERROR', 'JS:STEP::2');
                    return self::$ci->toolbox_model->minify_js($configs);
                }
                elseif( $configs['development'] != TRUE && empty($compare_files) && file_exists($configs['js_path'] . $configs['output']) )
                {
                    // Production, nothing changed: reuse the existing build output.
                    log_message('ERROR', 'JS:STEP::3');
                    return self::$ci->toolbox_model->_script_tag($configs['assets_dir'] . $configs['js_dir'], $configs['output'], $configs['url']);
                }
                else
                {
                    // Development fallback: concatenate without compression.
                    log_message('ERROR', 'JS:STEP::4');
                    return self::$ci->toolbox_model->minify_js($configs, FALSE);
                }
            }

            if( $type == 'css' )
            {
                if( $configs['development'] != TRUE && ! empty($compare_files) && $configs['compress'] == TRUE )
                {
                    log_message('ERROR', 'CSS:STEP::1');
                    return self::$ci->toolbox_model->minify_css($configs);
                }
                else
                {
                    log_message('ERROR', 'CSS:STEP::2');
                    return self::$ci->toolbox_model->minify_css($configs, FALSE);
                }
            }

            // NOTE(review): this branch looks unreachable — $files being NULL is
            // already excluded by the enclosing if(), and both type branches return.
            return self::show_tag_error($tag, 'You need to define file name or file names. Ex.: "files="my_css|my_other_css" or files="my_js|my_other_js".');
        }

        return self::show_tag_error($tag, 'Type is NULL or Type has wrong value. Available values "type="css" or type="js"."');
    }

    // ------------------------------------------------------------------------

    /**
     * Renders a QR code image built from fields of a parent tag's data array.
     *
     * @usage	<ion:toolbox:qrcode />
     *
     * @param  FTL_Binding $tag
     * @return mixed
     */
    public static function tag_qrcode(FTL_Binding $tag)
    {
        $parent = $tag->getAttribute('parent', FALSE);
        $data = $tag->getAttribute('data', NULL);
        $level = $tag->getAttribute('level', NULL);
        $size = $tag->getAttribute('size', NULL);

        /**
         * If "ciqrcode" Class not Loaded, Load "ciqrcode" Class
         */
        if ( ! class_exists('ciqrcode') )
        {
            $config = array(
                'qr_file_path'      => 'files' . DIRECTORY_SEPARATOR . '.qrcode' . DIRECTORY_SEPARATOR,
                'qr_libraries_path' => MODPATH . 'Toolbox' . DIRECTORY_SEPARATOR . 'libraries' . DIRECTORY_SEPARATOR,
                'cachedir'          => 'files' . DIRECTORY_SEPARATOR . '.qrcode' . DIRECTORY_SEPARATOR . 'cache' . DIRECTORY_SEPARATOR,
                'errorlog'          => 'files' . DIRECTORY_SEPARATOR . '.qrcode' . DIRECTORY_SEPARATOR . 'logs' . DIRECTORY_SEPARATOR,
            );
            // NOTE(review): _check_qr_folder() is declared as an instance method
            // but is invoked statically here — deprecated in modern PHP; verify.
            self::_check_qr_folder($config);
            self::$ci->load->library('ciqrcode', $config);
        }

        $getParent = $tag->get($parent);
        $qrData = '';
        if( ! empty($getParent) )
        {
            // "data" is a pipe-separated list of keys to pull from the parent array.
            $data = explode('|', $data);
            // trace($tag->get('article'));
            foreach($data as $key => $value)
            {
                // Ternary used purely for its side effect: append the field plus
                // a newline when the parent carries a non-empty value for it.
                (( ! empty($getParent[$value]) ) ? $qrData .= $getParent[$value] . "\n" : '');
                log_message('ERROR', '#value :: ' . $value);
            }
        }

        // NOTE(review): $params is used without prior initialization (PHP notice).
        $params['data'] = $qrData;
        $params['level'] = $level; // L - M - Q - H
        $params['size'] = $size; // 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10
        $qrcode_file_path = 'files' . DIRECTORY_SEPARATOR . '.qrcode' . DIRECTORY_SEPARATOR;
        if( ! file_exists(DOCPATH . $qrcode_file_path) ) @mkdir(DOCPATH . $qrcode_file_path, 0777);
        // NOTE(review): fixed filename 'test.png' means concurrent pages overwrite
        // each other's QR image — confirm whether a per-content name was intended.
        $params['savename'] = DOCPATH . $qrcode_file_path . 'test.png';
        self::$ci->ciqrcode->generate($params);

        return '<img src="' . base_url() . $qrcode_file_path . 'test.png" />';
    }

    // ------------------------------------------------------------------------

    /**
     * Ensures the QR-code working folders exist, creating them when missing.
     *
     * @param array $params expects 'qr_file_path', 'cachedir' and 'errorlog' keys
     */
    function _check_qr_folder($params=array())
    {
        /**
         * Check Folders Exist, If not create folders
         */
        if( ! file_exists(DOCPATH . $params['qr_file_path']) ) @mkdir(DOCPATH . $params['qr_file_path'], 0777);
        if( ! file_exists(DOCPATH . $params['cachedir']) ) @mkdir(DOCPATH . $params['cachedir'], 0777);
        if( ! file_exists(DOCPATH . $params['errorlog']) ) @mkdir(DOCPATH . $params['errorlog'], 0777);
        return;
    }

    // ------------------------------------------------------------------------

    /**
     * Renders the reCAPTCHA widget HTML, lazily loading the library.
     *
     * @usage	<ion:toolbox:recaptcha />
     *
     * @param  FTL_Binding $tag
     * @return mixed
     */
    public static function tag_recaptcha(FTL_Binding $tag)
    {
        /**
         * If "Recaptcha" Class not Loaded, Load "Recaptcha" Class
         */
        if ( ! class_exists('recaptcha') )
        {
            self::$ci->load->library('recaptcha');
        }
        return self::$ci->recaptcha->recaptcha_get_html();
    }
}
package edu.sampleu.krad.travelview;

import org.junit.Ignore;
import org.junit.Test;

/**
 * Bookmark-URL variant of the add/delete fiscal officer maintenance test;
 * all test logic lives in the shared base class.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public class MaintenanceAddDeleteFiscalOfficerBkMrkAft extends MaintenanceAddDeleteFiscalOfficerAftBase {

    /** Navigate via the bookmark URL defined on the base class. */
    @Override
    public String getBookmarkUrl() {
        return BOOKMARK_URL;
    }

    @Ignore // link removed
    @Test
    public void testMaintenanceAddDeleteFiscalOfficerBookmark() throws Exception {
        // Delegates to the base-class implementation, passing itself as context.
        testMaintenanceAddDeleteFiscalOfficerBookmark(this);
    }
}
// ZAP: 2011/08/30 Support for scanner levels // ZAP: 2012/01/02 Separate param and attack // ZAP: 2012/03/03 Added getLevel(boolean incDefault) // ZAP: 2102/03/15 Changed the type of the parameter "sb" of the method matchBodyPattern to // StringBuilder. // ZAP: 2012/04/25 Added @Override annotation to all appropriate methods. // ZAP: 2012/08/07 Renamed Level to AlertThreshold and added support for AttackStrength // ZAP: 2012/08/31 Enabled control of AttackStrength // ZAP: 2012/10/03 Issue 388 Added enabling support for technologies // ZAP: 2013/01/19 Issue 460 Add support for a scan progress dialog // ZAP: 2013/01/25 Removed the "(non-Javadoc)" comments. // ZAP: 2013/02/19 Issue 528 Scan progress dialog can show negative progress times // ZAP: 2013/04/14 Issue 611: Log the exceptions thrown by active scanners as error // ZAP: 2013/05/02 Re-arranged all modifiers into Java coding standard order // ZAP: 2013/07/12 Issue 713: Add CWE and WASC numbers to issues // ZAP: 2013/09/08 Issue 691: Handle old plugins // ZAP: 2013/11/16 Issue 842: NullPointerException while active scanning with ExtensionAntiCSRF disabled // ZAP: 2014/01/16 Add support to plugin skipping // ZAP: 2014/02/12 Issue 1030: Load and save scan policies // ZAP: 2014/02/21 Issue 1043: Custom active scan dialog // ZAP: 2014/05/15 Issue 1196: AbstractPlugin.bingo incorrectly sets evidence to attack // ZAP: 2014/05/23 Issue 1209: Reliability becomes Confidence and add levels // ZAP: 2014/07/07 Issue 389: Enable technology scope for scanners // ZAP: 2014/10/25 Issue 1062: Made plugins that calls sendandrecieve also invoke scanner // hook before and after message update // ZAP: 2014/11/19 Issue 1412: Init scan rule status (quality) from add-on // ZAP: 2015/03/26 Issue 1573: Add option to inject plugin ID in header for all ascan requests // ZAP: 2015/07/26 Issue 1618: Target Technology Not Honored package org.parosproxy.paros.core.scanner; import java.io.IOException; import 
java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.net.URLEncoder; import java.security.InvalidParameterException; import java.util.Date; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.configuration.Configuration; import org.apache.commons.httpclient.HttpException; import org.apache.log4j.Logger; import org.parosproxy.paros.control.Control; import org.parosproxy.paros.extension.encoder.Encoder; import org.parosproxy.paros.network.HttpHeader; import org.parosproxy.paros.network.HttpMessage; import org.zaproxy.zap.control.AddOn; import org.zaproxy.zap.extension.anticsrf.AntiCsrfToken; import org.zaproxy.zap.extension.anticsrf.ExtensionAntiCSRF; import org.zaproxy.zap.model.Tech; import org.zaproxy.zap.model.TechSet; public abstract class AbstractPlugin implements Plugin, Comparable<Object> { /** * Default pattern used in pattern check for most plugins. */ protected static final int PATTERN_PARAM = Pattern.CASE_INSENSITIVE | Pattern.MULTILINE; /** * CRLF string. 
*/ protected static final String CRLF = "\r\n"; private HostProcess parent = null; private HttpMessage msg = null; // private boolean enabled = false; private Logger log = Logger.getLogger(this.getClass()); private Configuration config = null; // ZAP Added delayInMs private int delayInMs; private ExtensionAntiCSRF extAntiCSRF = null; private Encoder encoder = new Encoder(); private AlertThreshold defaultAttackThreshold = AlertThreshold.MEDIUM; private static final AlertThreshold[] alertThresholdsSupported = new AlertThreshold[]{AlertThreshold.MEDIUM}; private AttackStrength defaultAttackStrength = AttackStrength.MEDIUM; private static final AttackStrength[] attackStrengthsSupported = new AttackStrength[]{AttackStrength.MEDIUM}; private TechSet techSet = null; private Date started = null; private Date finished = null; private AddOn.Status status = AddOn.Status.unknown; /** * Default Constructor */ public AbstractPlugin() { } @Override public abstract int getId(); @Override public abstract String getName(); @Override public String getCodeName() { String result = getClass().getName(); int pos = getClass().getName().lastIndexOf("."); if (pos > -1) { result = result.substring(pos + 1); } return result; } @Override public abstract String[] getDependency(); @Override public abstract String getDescription(); @Override public abstract int getCategory(); @Override public abstract String getSolution(); @Override public abstract String getReference(); @Override public void init(HttpMessage msg, HostProcess parent) { this.msg = msg.cloneAll(); this.parent = parent; if (this.parent.getScannerParam().isInjectPluginIdInHeader()) { this.msg.getRequestHeader().setHeader(HttpHeader.X_ZAP_SCAN_ID, Integer.toString(getId())); } init(); } public abstract void init(); /** * Obtain a new HttpMessage with the same request as the base. The response * is empty. This is used by plugin to build/craft a new message to * send/receive. It does not affect the base message. 
* * @return A new HttpMessage with cloned request. Response is empty. */ protected HttpMessage getNewMsg() { return msg.cloneRequest(); } /** * Get the base reference HttpMessage for this check. Both request and * response is present. It should not be modified during when the plugin * runs. * * @return The base HttpMessage with request/response. */ protected HttpMessage getBaseMsg() { return msg; } /** * Send and receive a HttpMessage. msg should have the request header/body * set. Fresh copy will always be retrieved via this method. The request * header content length will be modified by this method. * * @param msg * @throws HttpException * @throws IOException */ protected void sendAndReceive(HttpMessage msg) throws HttpException, IOException { sendAndReceive(msg, true); } protected void sendAndReceive(HttpMessage msg, boolean isFollowRedirect) throws HttpException, IOException { sendAndReceive(msg, isFollowRedirect, true); } /** * Send and receive a HttpMessage. msg should have the request header/body * set. Fresh copy will always be retrieved via this method. The request * header content length will be modified by this method. 
* * @param msg * @param isFollowRedirect follow redirect response * @throws HttpException * @throws IOException */ protected void sendAndReceive(HttpMessage msg, boolean isFollowRedirect, boolean handleAntiCSRF) throws HttpException, IOException { if (parent.handleAntiCsrfTokens() && handleAntiCSRF) { if (extAntiCSRF == null) { extAntiCSRF = (ExtensionAntiCSRF) Control.getSingleton().getExtensionLoader().getExtension(ExtensionAntiCSRF.NAME); } if (extAntiCSRF != null) { List<AntiCsrfToken> tokens = extAntiCSRF.getTokens(msg); AntiCsrfToken antiCsrfToken = null; if (tokens.size() > 0) { antiCsrfToken = tokens.get(0); } if (antiCsrfToken != null) { regenerateAntiCsrfToken(msg, antiCsrfToken); } } } // always get the fresh copy msg.getRequestHeader().setHeader(HttpHeader.IF_MODIFIED_SINCE, null); msg.getRequestHeader().setHeader(HttpHeader.IF_NONE_MATCH, null); msg.getRequestHeader().setContentLength(msg.getRequestBody().length()); if (this.getDelayInMs() > 0) { try { Thread.sleep(this.getDelayInMs()); } catch (InterruptedException e) { // Ignore } } //ZAP: Runs the "beforeScan" methods of any ScannerHooks parent.performScannerHookBeforeScan(msg, this); parent.getHttpSender().sendAndReceive(msg, isFollowRedirect); // ZAP: Notify parent parent.notifyNewMessage(msg); //ZAP: Set the history reference back and run the "afterScan" methods of any ScannerHooks parent.performScannerHookAfterScan(msg, this); } private void regenerateAntiCsrfToken(HttpMessage msg, AntiCsrfToken antiCsrfToken) { if (antiCsrfToken == null) { return; } String tokenValue = null; try { HttpMessage tokenMsg = antiCsrfToken.getMsg().cloneAll(); // Ensure we dont loop sendAndReceive(tokenMsg, true, false); tokenValue = extAntiCSRF.getTokenValue(tokenMsg, antiCsrfToken.getName()); } catch (Exception e) { log.error(e.getMessage(), e); } if (tokenValue != null) { // Replace token value - only supported in the body right now log.debug("regenerateAntiCsrfToken replacing " + antiCsrfToken.getValue() + " with 
" + encoder.getURLEncode(tokenValue)); String replaced = msg.getRequestBody().toString(); replaced = replaced.replace(encoder.getURLEncode(antiCsrfToken.getValue()), encoder.getURLEncode(tokenValue)); msg.setRequestBody(replaced); extAntiCSRF.registerAntiCsrfToken(new AntiCsrfToken(msg, antiCsrfToken.getName(), tokenValue, antiCsrfToken.getFormIndex())); } } @Override public void run() { // ZAP : set skipped to false otherwise the plugin shoud stop continously //this.skipped = false; try { if (!isStop()) { this.started = new Date(); scan(); } } catch (Exception e) { getLog().error(e.getMessage(), e); } notifyPluginCompleted(getParent()); this.finished = new Date(); } /** * The core scan method to be implmented by subclass. */ @Override public abstract void scan(); /** * Generate an alert when a security issue (risk/info) is found. Default * name, description, solution of this Plugin will be used. * * @param risk * @param confidence * @param uri * @param param * @param attack * @param otherInfo * @param msg */ protected void bingo(int risk, int confidence, String uri, String param, String attack, String otherInfo, HttpMessage msg) { bingo(risk, confidence, this.getName(), this.getDescription(), uri, param, attack, otherInfo, this.getSolution(), msg); } /** * Generate an alert when a security issue (risk/info) is found. Custome * alert name, description and solution will be used. 
* * @param risk * @param confidence * @param name * @param description * @param uri * @param param * @param attack * @param otherInfo * @param solution * @param msg */ protected void bingo(int risk, int confidence, String name, String description, String uri, String param, String attack, String otherInfo, String solution, HttpMessage msg) { log.debug("New alert pluginid=" + +this.getId() + " " + name + " uri=" + uri); Alert alert = new Alert(this.getId(), risk, confidence, name); if (uri == null || uri.equals("")) { uri = msg.getRequestHeader().getURI().toString(); } if (param == null) { param = ""; } alert.setDetail(description, uri, param, attack, otherInfo, solution, this.getReference(), "", this.getCweId(), this.getWascId(), msg); parent.alertFound(alert); } /** * Generate an alert when a security issue (risk/info) is found. Default * name, description, solution of this Plugin will be used. * * @param risk * @param confidence * @param uri * @param param * @param attack * @param otherInfo * @param evidence * @param msg */ protected void bingo(int risk, int confidence, String uri, String param, String attack, String otherInfo, String evidence, HttpMessage msg) { bingo(risk, confidence, this.getName(), this.getDescription(), uri, param, attack, otherInfo, this.getSolution(), evidence, msg); } /** * Generate an alert when a security issue (risk/info) is found. Custome * alert name, description and solution will be used. 
* * @param risk * @param confidence * @param name * @param description * @param uri * @param param * @param attack * @param otherInfo * @param solution * @param evidence * @param msg */ protected void bingo(int risk, int confidence, String name, String description, String uri, String param, String attack, String otherInfo, String solution, String evidence, HttpMessage msg) { log.debug("New alert pluginid=" + +this.getId() + " " + name + " uri=" + uri); Alert alert = new Alert(this.getId(), risk, confidence, name); if (uri == null || uri.equals("")) { uri = msg.getRequestHeader().getURI().toString(); } if (param == null) { param = ""; } alert.setDetail(description, uri, param, attack, otherInfo, solution, this.getReference(), evidence, this.getCweId(), this.getWascId(), msg); parent.alertFound(alert); } protected void bingo(int risk, int confidence, String name, String description, String uri, String param, String attack, String otherInfo, String solution, String evidence, int cweId, int wascId, HttpMessage msg) { log.debug("New alert pluginid=" + +this.getId() + " " + name + " uri=" + uri); Alert alert = new Alert(this.getId(), risk, confidence, name); if (uri == null || uri.equals("")) { uri = msg.getRequestHeader().getURI().toString(); } if (param == null) { param = ""; } alert.setDetail(description, uri, param, attack, otherInfo, solution, this.getReference(), evidence, cweId, wascId, msg); parent.alertFound(alert); } /** * Check i * * @param msg * @return */ protected boolean isFileExist(HttpMessage msg) { return parent.getAnalyser().isFileExist(msg); } /** * Check if this test should be stopped. It should be checked periodically * in Plugin (eg when in loops) so the HostProcess can stop this Plugin * cleanly. * * @return */ protected boolean isStop() { // ZAP: added skipping controls return parent.isStop() || parent.isSkipped(this); } /** * @return Returns if this test is enabled. 
*/ @Override public boolean isEnabled() { return getProperty("enabled").equals("1"); } @Override public boolean isVisible() { return true; } /** * Enable this test */ @Override public void setEnabled(boolean enabled) { if (enabled) { setProperty("enabled", "1"); } else { setProperty("enabled", "0"); } } @Override public AlertThreshold getAlertThreshold() { return this.getAlertThreshold(false); } @Override public AlertThreshold getAlertThreshold(boolean incDefault) { AlertThreshold level = null; try { level = AlertThreshold.valueOf(getProperty("level")); //log.debug("getAlertThreshold from configs: " + level.name()); } catch (Exception e) { // Ignore } if (level == null) { if (this.isEnabled()) { if (incDefault) { level = AlertThreshold.DEFAULT; } else { level = defaultAttackThreshold; } //log.debug("getAlertThreshold default: " + level.name()); } else { level = AlertThreshold.OFF; //log.debug("getAlertThreshold not enabled: " + level.name()); } } else if (level.equals(AlertThreshold.DEFAULT)) { if (incDefault) { level = AlertThreshold.DEFAULT; } else { level = defaultAttackThreshold; } //log.debug("getAlertThreshold default: " + level.name()); } return level; } @Override public void setAlertThreshold(AlertThreshold level) { setProperty("level", level.name()); } @Override public void setDefaultAlertThreshold(AlertThreshold level) { this.defaultAttackThreshold = level; } /** * Override this if you plugin supports other levels. 
*/ @Override public AlertThreshold[] getAlertThresholdsSupported() { return alertThresholdsSupported; } @Override public AttackStrength getAttackStrength(boolean incDefault) { AttackStrength level = null; try { level = AttackStrength.valueOf(getProperty("strength")); //log.debug("getAttackStrength from configs: " + level.name()); } catch (Exception e) { // Ignore } if (level == null) { if (incDefault) { level = AttackStrength.DEFAULT; } else { level = this.defaultAttackStrength; } //log.debug("getAttackStrength default: " + level.name()); } else if (level.equals(AttackStrength.DEFAULT)) { if (incDefault) { level = AttackStrength.DEFAULT; } else { level = this.defaultAttackStrength; } //log.debug("getAttackStrength default: " + level.name()); } return level; } @Override public AttackStrength getAttackStrength() { return this.getAttackStrength(false); } @Override public void setAttackStrength(AttackStrength level) { setProperty("strength", level.name()); } @Override public void setDefaultAttackStrength(AttackStrength strength) { this.defaultAttackStrength = strength; } /** * Override this if you plugin supports other levels. */ @Override public AttackStrength[] getAttackStrengthsSupported() { return attackStrengthsSupported; } /** * Compare if 2 plugin is the same. */ @Override public int compareTo(Object obj) { int result = -1; if (obj instanceof AbstractPlugin) { AbstractPlugin test = (AbstractPlugin) obj; if (getId() < test.getId()) { result = -1; } else if (getId() > test.getId()) { result = 1; } else { result = 0; } } return result; } @Override public boolean equals(Object obj) { if (compareTo(obj) == 0) { return true; } return false; } /** * Check if the given pattern can be found in the header. * * @param msg * @param header name. * @param pattern * @return true if the pattern can be found. 
*/ protected boolean matchHeaderPattern(HttpMessage msg, String header, Pattern pattern) { if (msg.getResponseHeader().isEmpty()) { return false; } String val = msg.getResponseHeader().getHeader(header); if (val == null) { return false; } Matcher matcher = pattern.matcher(val); return matcher.find(); } /** * Check if the given pattern can be found in the msg body. If the supplied * StringBuilder is not null, append the result to the StringBuilder. * * @param msg * @param pattern * @param sb * @return true if the pattern can be found. */ protected boolean matchBodyPattern(HttpMessage msg, Pattern pattern, StringBuilder sb) { // ZAP: Changed the type of the parameter "sb" to StringBuilder. Matcher matcher = pattern.matcher(msg.getResponseBody().toString()); boolean result = matcher.find(); if (result) { if (sb != null) { sb.append(matcher.group()); } } return result; } /** * Write a progress update message. Currently this just display in * System.out * * @param msg */ protected void writeProgress(String msg) { //System.out.println(msg); } /** * Get the parent HostProcess. * * @return */ //ZAP: Changed from protected to public access modifier. public HostProcess getParent() { return parent; } @Override public abstract void notifyPluginCompleted(HostProcess parent); /** * Replace body by stripping of pattern string. The URLencoded and * URLdecoded pattern will also be stripped off. This is mainly used for * stripping off a testing string in HTTP response for comparison against * the original response. 
Reference: TestInjectionSQL
 *
 * @param body the (response) body to clean
 * @param pattern the injected pattern whose reflections should be removed
 * @return the body with the pattern and its URL/HTML-encoded variants removed
 */
protected String stripOff(String body, String pattern) {
    // Pre-compute the encoded variants of the pattern so reflections survive
    // whatever encoding the server applied to the injected value.
    String urlEncodePattern = getURLEncode(pattern);
    String urlDecodePattern = getURLDecode(pattern);
    String htmlEncodePattern1 = getHTMLEncode(pattern);
    String htmlEncodePattern2 = getHTMLEncode(urlEncodePattern);
    String htmlEncodePattern3 = getHTMLEncode(urlDecodePattern);
    // \Q ... \E quotes the pattern so regex metacharacters match literally.
    String result = body.replaceAll("\\Q" + pattern + "\\E", "").replaceAll("\\Q" + urlEncodePattern + "\\E", "").replaceAll("\\Q" + urlDecodePattern + "\\E", "");
    result = result.replaceAll("\\Q" + htmlEncodePattern1 + "\\E", "").replaceAll("\\Q" + htmlEncodePattern2 + "\\E", "").replaceAll("\\Q" + htmlEncodePattern3 + "\\E", "");
    return result;
}

/** URL-encodes {@code msg} as UTF-8; returns "" if encoding fails. */
public static String getURLEncode(String msg) {
    String result = "";
    try {
        result = URLEncoder.encode(msg, "UTF8");
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }
    return result;
}

/** URL-decodes {@code msg} as UTF-8; returns "" if decoding fails. */
public static String getURLDecode(String msg) {
    String result = "";
    try {
        result = URLDecoder.decode(msg, "UTF8");
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }
    return result;
}

/** HTML-encodes only the angle brackets of {@code msg} (&#60; / &#62;). */
public static String getHTMLEncode(String msg) {
    String result = msg.replaceAll("<", "&#60;");
    result = result.replaceAll(">", "&#62;");
    return result;
}

/** Convenience accessor for the parent scanner's knowledge base. */
protected Kb getKb() {
    return getParent().getKb();
}

protected Logger getLog() {
    return log;
}

/** Reads a plugin-scoped property from this plugin's own configuration. */
public String getProperty(String key) {
    return this.getProperty(config, key);
}

// Properties are namespaced per plugin as "plugins.p<id>.<key>".
private String getProperty(Configuration conf, String key) {
    return conf.getString("plugins." + "p" + getId() + "." + key);
}

/** Writes a plugin-scoped property into this plugin's own configuration. */
public void setProperty(String key, String value) {
    this.setProperty(config, key, value);
}

private void setProperty(Configuration conf, String key, String value) {
    conf.setProperty("plugins." + "p" + getId() + "." + key, value);
}

@Override
public void setConfig(Configuration config) {
    this.config = config;
}

@Override
public Configuration getConfig() {
    return config;
}

@Override
public void saveTo(Configuration conf) {
    // Only seed "enabled" when it has never been set in our own config.
    if (getProperty("enabled") == null) {
        setProperty(conf, "enabled", "1");
    }
    setProperty(conf, "level", getProperty("level"));
    setProperty(conf, "strength", getProperty("strength"));
}

@Override
public void loadFrom(Configuration conf) {
    // Mirror image of saveTo(): pull level/strength from the given config.
    if (getProperty(conf, "enabled") == null) {
        setProperty("enabled", "1");
    }
    setProperty("level", getProperty(conf, "level"));
    setProperty("strength", getProperty(conf, "strength"));
}

@Override
public void cloneInto(Plugin plugin) {
    if (plugin instanceof AbstractPlugin) {
        AbstractPlugin ap = (AbstractPlugin) plugin;
        ap.setEnabled(this.isEnabled());
        ap.setAlertThreshold(this.getAlertThreshold(true));
        ap.setAttackStrength(this.getAttackStrength(true));
        // NOTE(review): defaultAttackThreshold is passed to
        // setDefaultAlertThreshold — the field name suggests it actually
        // holds the alert-threshold default; confirm against the field
        // declarations earlier in this class.
        ap.setDefaultAlertThreshold(this.defaultAttackThreshold);
        ap.setDefaultAttackStrength(this.defaultAttackStrength);
        ap.setTechSet(this.getTechSet());
        ap.setStatus(this.getStatus());
        ap.saveTo(plugin.getConfig());
    } else {
        throw new InvalidParameterException("Not an AbstractPlugin");
    }
}

/**
 * Check and create necessary parameter in config file if not already
 * present.
 */
@Override
public void createParamIfNotExist() {
    if (getProperty("enabled") == null) {
        setProperty("enabled", "1");
    }
}

// ZAP Added isDepreciated
@Override
public boolean isDepreciated() {
    return false;
}

/**
 * @since 2.2.0
 */
@Override
public int getRisk() {
    return Alert.RISK_MEDIUM;
}

@Override
public int getDelayInMs() {
    return delayInMs;
}

@Override
public void setDelayInMs(int delayInMs) {
    this.delayInMs = delayInMs;
}

@Override
public boolean inScope(Tech tech) {
    // A null techSet means "no restriction": every technology is in scope.
    return this.techSet == null || this.techSet.includes(tech);
}

@Override
public void setTechSet(TechSet ts) {
    this.techSet = ts;
}

/**
 * Returns the technologies enabled for the scan.
 *
 * @return a {@code TechSet} with the technologies enabled for the scan.
 * @see #inScope(Tech)
 * @see #targets(TechSet)
 */
public TechSet getTechSet() {
    return this.techSet;
}

/**
 * Returns {@code true} by default.
 *
 * @see #getTechSet()
 */
@Override
public boolean targets(TechSet technologies) {
    return true;
}

@Override
public Date getTimeStarted() {
    return this.started;
}

@Override
public Date getTimeFinished() {
    return this.finished;
}

@Override
public void setTimeStarted() {
    this.started = new Date();
    this.finished = null;
}

@Override
public void setTimeFinished() {
    this.finished = new Date();
}

@Override
public int getCweId() {
    // Default 'unknown' value
    return 0;
}

@Override
public int getWascId() {
    // Default 'unknown' value
    return 0;
}

public AddOn.Status getStatus() {
    return status;
}

public void setStatus(AddOn.Status status) {
    this.status = status;
}
}
Percent encoding is a generalization of the text escaping method defined for
URIs in [RFC 3986](http://tools.ietf.org/html/rfc3986#section-2.1). Unlike C
backslash escaping, which requires that every reserved character be explicitly
named (e.g. 0x0a corresponds to \n), percent encoding can easily accommodate an
arbitrary set of reserved characters.

For the specific case of URI escaping, the percentcoding library also provides
a 10x faster drop-in replacement for the `urllib.quote`, `urllib.unquote`,
`urllib.quote_plus`, and `urllib.unquote_plus` functions.

A unit test suite is included.

## Examples ##

As a faster replacement for `urllib.quote` and `urllib.unquote`:

    #!/usr/bin/env python
    from percentcoding import quote, unquote

    str = "This is a test!"
    escaped = quote(str)
    print escaped
    assert(str == unquote(escaped))

Escaping whitespace in whitespace-delimited records:

    #!/usr/bin/env python
    import percentcoding
    import string

    ascii = set([chr(c) for c in xrange(256)])
    whitespace = set([c for c in string.whitespace])
    safe = ''.join( ascii - whitespace )
    codec = percentcoding.Codec(safe)

    record = [ "a\nleaf\nfalls", " X\tY\tZ " ]
    print " ".join([ codec.encode(v) for v in record])

## Performance ##

The `percentcoding` library is about 10x faster than the standard
`urllib.quote` and `urllib.unquote` implementations. This is not surprising;
the standard implementations are pure Python.

    $ ./benchmark.py
    percentcodec.encode x 10000
    0.348151922226
    percentcodec.decode x 10000
    0.381587028503
    urllib.quote x 10000
    4.51035284996
    urllib.unquote x 10000
    3.50923490524

## Notes ##

(TODO: move into pydoc)

All ASCII characters *not* occurring in the safe set are considered unsafe and
will be escaped by `encode`.

With `quote` and `unquote`, the `'+'` character does not map to a space, as is
necessary for processing `application/x-www-form-urlencoded`. Like `urllib`,
`percentcoding` exports `quote_plus` and `unquote_plus` for that.
The `"%%"` character sequence decodes to `'%'`, but is not the canonical encoding. When decoding, if an invalid hex sequence is encountered (eg `"%az"`), it is copied as-is. Per the spec, Unicode and UTF-8 strings are encoded byte-wise, resulting in an ASCII string. When decoding, the result is also an ASCII string, which if originally Unicode can be recovered using the Python string method `decode`: unquote(s).decode('utf8') ## Installation ## Ubuntu / Debian users: fakeroot ./debian/rules binary dpkg -i ../python-percentcoding*.deb If there's no "real" packaging for your system yet: ./setup.py build_ext --inplace ./test.py ./setup.py build ./setup.py install
/*globals jQuery, ajaxurl, wpml_st_disable_notices_strings */

/**
 * Opens a modal confirmation dialog asking whether the "strings scan" admin
 * notices should be hidden. Choosing "Yes" posts the
 * `hide_strings_scan_notices` AJAX action first; either choice then closes
 * the dialog and invokes the optional callback.
 *
 * @param {*}        element    Unused; kept for call-site compatibility.
 * @param {Function} [callback] Invoked after either button is handled.
 */
function wpml_st_hide_strings_scan_notices(element, callback) {
    "use strict";

    var strings = wpml_st_disable_notices_strings;
    var confirmBox;

    // Shared tail for both buttons: run the caller's callback, then close.
    var finish = function () {
        if (typeof callback === 'function') {
            callback();
        }
        confirmBox.dialog("close");
    };

    confirmBox = jQuery(
        '<div title="' + strings.title + '"><p>' + strings.message + '</p></div>'
    );
    confirmBox.css('display', 'none');

    confirmBox.dialog({
        resizable: false,
        height: "auto",
        width: "auto",
        modal: true,
        buttons: {
            buttonNo: {
                text: strings.no,
                click: finish
            },
            buttonYes: {
                text: strings.yes,
                class: 'button-primary',
                click: function () {
                    jQuery.ajax({
                        url: ajaxurl,
                        type: 'POST',
                        data: {
                            action: 'hide_strings_scan_notices'
                        },
                        dataType: 'json',
                        complete: finish
                    });
                }
            }
        }
    });
}
/* ssl/ssl_task.c */ /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) * All rights reserved. * * This package is an SSL implementation written * by Eric Young (eay@cryptsoft.com). * The implementation was written so as to conform with Netscapes SSL. * * This library is free for commercial and non-commercial use as long as * the following conditions are aheared to. The following conditions * apply to all code found in this distribution, be it the RC4, RSA, * lhash, DES, etc., code; not just the SSL code. The SSL documentation * included with this distribution is covered by the same copyright terms * except that the holder is Tim Hudson (tjh@cryptsoft.com). * * Copyright remains Eric Young's, and as such any Copyright notices in * the code are not to be removed. * If this package is used in a product, Eric Young should be given attribution * as the author of the parts of the library used. * This can be in the form of a textual message at program startup or * in documentation (online or textual) provided with the package. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * "This product includes cryptographic software written by * Eric Young (eay@cryptsoft.com)" * The word 'cryptographic' can be left out if the rouines from the library * being used are not cryptographic related :-). * 4. 
If you include any Windows specific code (or a derivative thereof) from * the apps directory (application code) you must include an acknowledgement: * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" * * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * The licence and distribution terms for any publically available version or * derivative of this code cannot be changed. i.e. this code cannot simply be * copied and put under another distribution licence * [including the GNU Public Licence.] */ /* VMS */ /* * DECnet object for servicing SSL. We accept the inbound and speak a * simple protocol for multiplexing the 2 data streams (application and * ssl data) over this logical link. * * Logical names: * SSL_CIPHER Defines a list of cipher specifications the server * will support in order of preference. * SSL_SERVER_CERTIFICATE * Points to PEM (privacy enhanced mail) file that * contains the server certificate and private password. * SYS$NET Logical created by netserver.exe as hook for completing * DECnet logical link. 
 *
 * Each NSP message sent over the DECnet link has the following structure:
 *    struct rpc_msg {
 *        char channel;
 *        char function;
 *        short length;
 *        char data[MAX_DATA];
 *    } msg;
 *
 * The channel field designates the virtual data stream this message applies
 * to and is one of:
 *    A - Application data (payload).
 *    R - Remote client connection that initiated the SSL connection. Encrypted
 *        data is sent over this connection.
 *    G - General data, reserved for future use.
 *
 * The data streams are half-duplex read/write and have following functions:
 *    G - Get, requests that up to msg.length bytes of data be returned. The
 *        data is returned in the next 'C' function response that matches the
 *        requesting channel.
 *    P - Put, requests that the first msg.length bytes of msg.data be appended
 *        to the designated stream.
 *    C - Confirms a get or put. Every get and put will get a confirm response,
 *        you cannot initiate another function on a channel until the previous
 *        operation has been confirmed.
 *
 * The 2 channels may interleave their operations, for example:
 *        Server msg               Client msg
 *        A, Get, 4092      ---->
 *                          <----  R, get, 4092
 *        R, Confirm, {hello} ---->
 *                          <----  R, put, {srv hello}
 *        R, Confirm, 0     ---->
 *        .  (SSL handshake completed)
 *        .  (read first app data).
 *                          <----  A, confirm, {http data}
 *        A, Put, {http data} ---->
 *                          <----  A, confirm, 0
 *
 * The length field is not permitted to be larger than 4092 bytes.
* * Author: Dave Jones * Date: 22-JUL-1996 */ #include <stdlib.h> #include <stdio.h> #include <iodef.h> /* VMS IO$_ definitions */ #include <descrip.h> /* VMS string descriptors */ extern int SYS$QIOW(), SYS$ASSIGN(); int LIB$INIT_TIMER(), LIB$SHOW_TIMER(); #include <string.h> /* from ssltest.c */ #include <errno.h> #include "e_os.h" #include <openssl/buffer.h> #include <openssl/x509.h> #include <openssl/ssl.h> #include <openssl/err.h> int MS_CALLBACK verify_callback(int ok, X509 *xs, X509 *xi, int depth, int error); BIO *bio_err=NULL; BIO *bio_stdout=NULL; BIO_METHOD *BIO_s_rtcp(); static char *cipher=NULL; int verbose=1; #ifdef FIONBIO static int s_nbio=0; #endif #define TEST_SERVER_CERT "SSL_SERVER_CERTIFICATE" /*************************************************************************/ struct rpc_msg { /* Should have member alignment inhibited */ char channel; /* 'A'-app data. 'R'-remote client 'G'-global */ char function; /* 'G'-get, 'P'-put, 'C'-confirm, 'X'-close */ unsigned short int length; /* Amount of data returned or max to return */ char data[4092]; /* variable data */ }; #define RPC_HDR_SIZE (sizeof(struct rpc_msg) - 4092) static $DESCRIPTOR(sysnet, "SYS$NET"); typedef unsigned short io_channel; struct io_status { unsigned short status; unsigned short count; unsigned long stsval; }; int doit(io_channel chan, SSL_CTX *s_ctx ); /*****************************************************************************/ /* Decnet I/O routines. 
*/ static int get ( io_channel chan, char *buffer, int maxlen, int *length ) { int status; struct io_status iosb; status = SYS$QIOW ( 0, chan, IO$_READVBLK, &iosb, 0, 0, buffer, maxlen, 0, 0, 0, 0 ); if ( (status&1) == 1 ) status = iosb.status; if ( (status&1) == 1 ) *length = iosb.count; return status; } static int put ( io_channel chan, char *buffer, int length ) { int status; struct io_status iosb; status = SYS$QIOW ( 0, chan, IO$_WRITEVBLK, &iosb, 0, 0, buffer, length, 0, 0, 0, 0 ); if ( (status&1) == 1 ) status = iosb.status; return status; } /***************************************************************************/ /* Handle operations on the 'G' channel. */ static int general_request ( io_channel chan, struct rpc_msg *msg, int length ) { return 48; } /***************************************************************************/ int main ( int argc, char **argv ) { int status, length; io_channel chan; struct rpc_msg msg; char *CApath=NULL,*CAfile=NULL; int badop=0; int ret=1; int client_auth=0; int server_auth=0; SSL_CTX *s_ctx=NULL; /* * Confirm logical link with initiating client. */ LIB$INIT_TIMER(); status = SYS$ASSIGN ( &sysnet, &chan, 0, 0, 0 ); printf("status of assign to SYS$NET: %d\n", status ); /* * Initialize standard out and error files. */ if (bio_err == NULL) if ((bio_err=BIO_new(BIO_s_file())) != NULL) BIO_set_fp(bio_err,stderr,BIO_NOCLOSE); if (bio_stdout == NULL) if ((bio_stdout=BIO_new(BIO_s_file())) != NULL) BIO_set_fp(bio_stdout,stdout,BIO_NOCLOSE); /* * get the preferred cipher list and other initialization */ if (cipher == NULL) cipher=getenv("SSL_CIPHER"); printf("cipher list: %s\n", cipher ? 
cipher : "{undefined}" ); SSL_load_error_strings(); OpenSSL_add_all_algorithms(); /* DRM, this was the original, but there is no such thing as SSLv2() s_ctx=SSL_CTX_new(SSLv2()); */ s_ctx=SSL_CTX_new(SSLv2_server_method()); if (s_ctx == NULL) goto end; SSL_CTX_use_certificate_file(s_ctx,TEST_SERVER_CERT,SSL_FILETYPE_PEM); SSL_CTX_use_RSAPrivateKey_file(s_ctx,TEST_SERVER_CERT,SSL_FILETYPE_PEM); printf("Loaded server certificate: '%s'\n", TEST_SERVER_CERT ); /* * Take commands from client until bad status. */ LIB$SHOW_TIMER(); status = doit ( chan, s_ctx ); LIB$SHOW_TIMER(); /* * do final cleanup and exit. */ end: if (s_ctx != NULL) SSL_CTX_free(s_ctx); LIB$SHOW_TIMER(); return 1; } int doit(io_channel chan, SSL_CTX *s_ctx ) { int status, length, link_state; struct rpc_msg msg; SSL *s_ssl=NULL; BIO *c_to_s=NULL; BIO *s_to_c=NULL; BIO *c_bio=NULL; BIO *s_bio=NULL; int i; int done=0; s_ssl=SSL_new(s_ctx); if (s_ssl == NULL) goto err; c_to_s=BIO_new(BIO_s_rtcp()); s_to_c=BIO_new(BIO_s_rtcp()); if ((s_to_c == NULL) || (c_to_s == NULL)) goto err; /* original, DRM 24-SEP-1997 BIO_set_fd ( c_to_s, "", chan ); BIO_set_fd ( s_to_c, "", chan ); */ BIO_set_fd ( c_to_s, 0, chan ); BIO_set_fd ( s_to_c, 0, chan ); c_bio=BIO_new(BIO_f_ssl()); s_bio=BIO_new(BIO_f_ssl()); if ((c_bio == NULL) || (s_bio == NULL)) goto err; SSL_set_accept_state(s_ssl); SSL_set_bio(s_ssl,c_to_s,s_to_c); BIO_set_ssl(s_bio,s_ssl,BIO_CLOSE); /* We can always do writes */ printf("Begin doit main loop\n"); /* * Link states: 0-idle, 1-read pending, 2-write pending, 3-closed. */ for (link_state = 0; link_state < 3; ) { /* * Wait for remote end to request data action on A channel. 
*/ while ( link_state == 0 ) { status = get ( chan, (char *) &msg, sizeof(msg), &length ); if ( (status&1) == 0 ) { printf("Error in main loop get: %d\n", status ); link_state = 3; break; } if ( length < RPC_HDR_SIZE ) { printf("Error in main loop get size: %d\n", length ); break; link_state = 3; } if ( msg.channel != 'A' ) { printf("Error in main loop, unexpected channel: %c\n", msg.channel ); break; link_state = 3; } if ( msg.function == 'G' ) { link_state = 1; } else if ( msg.function == 'P' ) { link_state = 2; /* write pending */ } else if ( msg.function == 'X' ) { link_state = 3; } else { link_state = 3; } } if ( link_state == 1 ) { i = BIO_read ( s_bio, msg.data, msg.length ); if ( i < 0 ) link_state = 3; else { msg.channel = 'A'; msg.function = 'C'; /* confirm */ msg.length = i; status = put ( chan, (char *) &msg, i+RPC_HDR_SIZE ); if ( (status&1) == 0 ) break; link_state = 0; } } else if ( link_state == 2 ) { i = BIO_write ( s_bio, msg.data, msg.length ); if ( i < 0 ) link_state = 3; else { msg.channel = 'A'; msg.function = 'C'; /* confirm */ msg.length = 0; status = put ( chan, (char *) &msg, RPC_HDR_SIZE ); if ( (status&1) == 0 ) break; link_state = 0; } } } fprintf(stdout,"DONE\n"); err: /* We have to set the BIO's to NULL otherwise they will be * free()ed twice. Once when th s_ssl is SSL_free()ed and * again when c_ssl is SSL_free()ed. * This is a hack required because s_ssl and c_ssl are sharing the same * BIO structure and SSL_set_bio() and SSL_free() automatically * BIO_free non NULL entries. * You should not normally do this or be required to do this */ s_ssl->rbio=NULL; s_ssl->wbio=NULL; if (c_to_s != NULL) BIO_free(c_to_s); if (s_to_c != NULL) BIO_free(s_to_c); if (c_bio != NULL) BIO_free(c_bio); if (s_bio != NULL) BIO_free(s_bio); return(0); }
package streamanalytics // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "net/http" ) // SubscriptionsClient is the stream Analytics Client type SubscriptionsClient struct { ManagementClient } // NewSubscriptionsClient creates an instance of the SubscriptionsClient client. func NewSubscriptionsClient(subscriptionID string) SubscriptionsClient { return NewSubscriptionsClientWithBaseURI(DefaultBaseURI, subscriptionID) } // NewSubscriptionsClientWithBaseURI creates an instance of the SubscriptionsClient client. func NewSubscriptionsClientWithBaseURI(baseURI string, subscriptionID string) SubscriptionsClient { return SubscriptionsClient{NewWithBaseURI(baseURI, subscriptionID)} } // ListQuotas retrieves the subscription's current quota information in a particular region. // // location is the region in which to retrieve the subscription's quota information. 
You can find out which regions // Azure Stream Analytics is supported in here: https://azure.microsoft.com/en-us/regions/ func (client SubscriptionsClient) ListQuotas(location string) (result SubscriptionQuotasListResult, err error) { req, err := client.ListQuotasPreparer(location) if err != nil { err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", nil, "Failure preparing request") return } resp, err := client.ListQuotasSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", resp, "Failure sending request") return } result, err = client.ListQuotasResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "streamanalytics.SubscriptionsClient", "ListQuotas", resp, "Failure responding to request") } return } // ListQuotasPreparer prepares the ListQuotas request. func (client SubscriptionsClient) ListQuotasPreparer(location string) (*http.Request, error) { pathParameters := map[string]interface{}{ "location": autorest.Encode("path", location), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2016-03-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.StreamAnalytics/locations/{location}/quotas", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare(&http.Request{}) } // ListQuotasSender sends the ListQuotas request. The method will close the // http.Response Body if it receives an error. func (client SubscriptionsClient) ListQuotasSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // ListQuotasResponder handles the response to the ListQuotas request. 
The method always // closes the http.Response Body. func (client SubscriptionsClient) ListQuotasResponder(resp *http.Response) (result SubscriptionQuotasListResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return }
<?php

namespace Predis\Command;

/**
 * Command object for the Redis RPOP command, which removes and returns
 * the last element of the list stored at the given key.
 *
 * @link http://redis.io/commands/rpop
 * @author Daniele Alessandri <suppakilla@gmail.com>
 */
class ListPopLast extends Command
{
    /**
     * {@inheritdoc}
     */
    public function getId()
    {
        // Command identifier sent on the wire to the Redis server.
        return 'RPOP';
    }
}
<?xml version="1.0" encoding="UTF-8"?> <!-- This file is part of the Sylius package. (c) Paweł Jędrzejewski For the full copyright and license information, please view the LICENSE file that was distributed with this source code. --> <doctrine-mapping xmlns="http://doctrine-project.org/schemas/orm/doctrine-mapping" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:gedmo="http://gediminasm.org/schemas/orm/doctrine-extensions-mapping" xsi:schemaLocation="http://doctrine-project.org/schemas/orm/doctrine-mapping http://doctrine-project.org/schemas/orm/doctrine-mapping.xsd"> <mapped-superclass name="Sylius\Component\Core\Model\OrderItemUnit" table="sylius_order_item_unit"> <field name="inventoryState" column="inventory_state" type="string" /> <field name="shippingState" column="shipping_state" type="string" nullable="false" /> <field name="createdAt" column="created_at" type="datetime"> <gedmo:timestampable on="create"/> </field> <field name="updatedAt" column="updated_at" type="datetime" nullable="true"> <gedmo:timestampable on="update"/> </field> <many-to-one field="shipment" target-entity="Sylius\Component\Shipping\Model\ShipmentInterface" inversed-by="units"> <join-column name="shipment_id" referenced-column-name="id" nullable="true" on-delete="SET NULL" /> </many-to-one> </mapped-superclass> </doctrine-mapping>
<resources> <!-- ********************** --> <!-- Material Drawer Dimens --> <!-- ********************** --> <!-- General drawer width https://medium.com/sebs-top-tips/material-navigation-drawer-sizing-558aea1ad266 --> <dimen name="material_drawer_width">320dp</dimen> <!-- Definition for the Drawer Slider margin right --> <dimen name="material_drawer_margin">0dp</dimen> <!-- General padding value --> <dimen name="material_drawer_padding">8dp</dimen> <!-- General vertical padding values / 24dp for tablets --> <dimen name="material_drawer_vertical_padding">16dp</dimen> <!-- Sticky footer elevation --> <dimen name="material_drawer_sticky_footer_elevation">4dp</dimen> <!-- General margins for the drawer account header --> <dimen name="material_drawer_account_header_height">148dp</dimen> <dimen name="material_drawer_account_header_height_compact">72dp</dimen> <dimen name="material_drawer_account_header_horizontal_top">16dp</dimen> <dimen name="material_drawer_account_header_horizontal_bottom">4dp</dimen> <dimen name="material_drawer_account_header_selected">64dp</dimen> <dimen name="material_drawer_account_header_secondary">40dp</dimen> <dimen name="material_drawer_account_header_compact">56dp</dimen> <dimen name="material_drawer_account_header_text">14sp</dimen> <dimen name="material_drawer_account_header_dropdown">22dp</dimen> <dimen name="material_drawer_account_header_dropdown_padding">5dp</dimen> <dimen name="material_drawer_account_header_dropdown_margin_bottom">14dp</dimen> <!-- General profileIcon margins for the drawer item --> <dimen name="material_drawer_item_padding">8dp</dimen> <!-- Definition for the primary item --> <dimen name="material_drawer_item_primary">48dp</dimen> <dimen name="material_drawer_item_primary_icon">56dp</dimen> <dimen name="material_drawer_item_primary_icon_padding">12dp</dimen> <dimen name="material_drawer_item_primary_icon_padding_right">32dp</dimen> <dimen name="material_drawer_item_primary_text">14sp</dimen> <dimen 
name="material_drawer_item_primary_description">14sp</dimen> <dimen name="material_drawer_item_secondary_description">12sp</dimen> <!-- Definition for the secondary item --> <dimen name="material_drawer_item_secondary">42dp</dimen> <dimen name="material_drawer_item_secondary_icon">56dp</dimen> <dimen name="material_drawer_item_secondary_icon_padding">12dp</dimen> <dimen name="material_drawer_item_secondary_icon_padding_right">36dp</dimen> <dimen name="material_drawer_item_secondary_text">14sp</dimen> <dimen name="material_drawer_item_secondary_extra_text">12sp</dimen> <!-- Definition for the profile item --> <dimen name="material_drawer_item_profile">72dp</dimen> <dimen name="material_drawer_item_profile_icon">40dp</dimen> <dimen name="material_drawer_item_profile_icon_width">48dp</dimen> <dimen name="material_drawer_item_profile_icon_padding">16dp</dimen> <dimen name="material_drawer_item_profile_icon_padding_right">8dp</dimen> <dimen name="material_drawer_item_profile_text">14sp</dimen> <dimen name="material_drawer_item_profile_description">14sp</dimen> <!-- Definition for the profile setting item --> <dimen name="material_drawer_item_profile_setting_icon_padding">24dp</dimen> <!-- Definition for the MiniDrawer item --> <dimen name="material_mini_drawer_item">72dp</dimen> <dimen name="material_mini_drawer_item_padding">4dp</dimen> <dimen name="material_mini_drawer_item_padding_sides">8dp</dimen> <dimen name="material_mini_drawer_item_icon">56dp</dimen> <dimen name="material_mini_drawer_item_icon_padding">16dp</dimen> <dimen name="material_mini_drawer_item_badge_text">12sp</dimen> <dimen name="material_mini_drawer_item_profile_icon">40dp</dimen> <dimen name="material_mini_drawer_item_profile_icon_padding">16dp</dimen> <!-- Definition for the section item --> <dimen name="material_drawer_item_section_text">14sp</dimen> </resources>
Patch submission guidelines [1]_ -------------------------------- Here are some guidelines about how you can contribute to Nikola: * First, make sure there is an open issue for your change. Perhaps, if it's a new feature, you probably want to `discuss it first <http://groups.google.com/group/nikola-discuss>`_ * **Create a new Git branch specific to your change(s).** For example, if you're adding a new feature to foo the bars, do something like the following:: $ git checkout master $ git pull $ git checkout -b foo-the-bars <hack hack hack> $ git push origin HEAD <submit pull request based on your new 'foo-the-bars' branch> This makes life much easier for maintainers if you have (or ever plan to have) additional changes in your own ``master`` branch. Also, if you have commit rights to the main Nikola repository, we suggest having your branch there, instead of a personal fork. .. admonition:: A corollary: Please **don't put multiple fixes/features in the same branch/pull request**! In other words, if you're hacking on new feature X and find a bugfix that doesn't *require* new feature X, **make a new distinct branch and PR** for the bugfix. * You may want to use the `Tim Pope’s Git commit messages standard <http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html>`_. It’s not necessary, but if you are doing something big, we recommend describing it in the commit message. * While working, **rebase instead of merging** (if possible). We encourage using ``git rebase`` instead of ``git merge``. If you are using ``git pull``, please run ``git config pull.rebase true`` to prevent merges from happening and replace them with rebase goodness. There is also an “emergency switch” in case rebases fail and you do not know what to do: ``git pull --no-rebase``. * **Make sure documentation is updated** — at the very least, keep docstrings current, and if necessary, update the reStructuredText documentation in ``docs/``. 
* **Add a changelog entry** at the top of ``CHANGES.txt`` mentioning issue number and in the correct Features/Bugfixes section. (while creating the new changelog entry, put it in the version that does not exist (consult setup.py) or create a new section) * **Run flake8** for style consistency. Use ``flake8 --ignore=E501 .`` * **Try writing some tests** if possible — again, following existing tests is often easiest, and a good way to tell whether the feature you are modifying is easily testable. You will find instructions in ``tests/README.rst``. (alternatively you can push and wait for Travis to pick up and test your changes, but we encourage to run them locally before pushing.) * Make sure to mention the issue it affects in the description of your pull request, so it's clear what to test and how to do it. * There are some quirks to how Nikola's codebase is structured, and to how some things need to be done [2]_ but don't worry, we'll guide you! .. [1] Very inspired by `fabric's <https://github.com/fabric/fabric/blob/master/CONTRIBUTING.rst>`_ thanks! .. [2] For example, logging, or always making sure directories are created using ``utils.makedirs()``
package com.google.cloud.container.v1; import static com.google.cloud.container.v1.ClusterManagerClient.ListUsableSubnetworksPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.ClientSettings; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.cloud.container.v1.stub.ClusterManagerStubSettings; import com.google.container.v1.CancelOperationRequest; import com.google.container.v1.Cluster; import com.google.container.v1.CompleteIPRotationRequest; import com.google.container.v1.CompleteNodePoolUpgradeRequest; import com.google.container.v1.CreateClusterRequest; import com.google.container.v1.CreateNodePoolRequest; import com.google.container.v1.DeleteClusterRequest; import com.google.container.v1.DeleteNodePoolRequest; import com.google.container.v1.GetClusterRequest; import com.google.container.v1.GetJSONWebKeysRequest; import com.google.container.v1.GetJSONWebKeysResponse; import com.google.container.v1.GetNodePoolRequest; import com.google.container.v1.GetOperationRequest; import com.google.container.v1.GetServerConfigRequest; import com.google.container.v1.ListClustersRequest; import com.google.container.v1.ListClustersResponse; import com.google.container.v1.ListNodePoolsRequest; import com.google.container.v1.ListNodePoolsResponse; import com.google.container.v1.ListOperationsRequest; import com.google.container.v1.ListOperationsResponse; import com.google.container.v1.ListUsableSubnetworksRequest; import com.google.container.v1.ListUsableSubnetworksResponse; import 
com.google.container.v1.NodePool; import com.google.container.v1.Operation; import com.google.container.v1.RollbackNodePoolUpgradeRequest; import com.google.container.v1.ServerConfig; import com.google.container.v1.SetAddonsConfigRequest; import com.google.container.v1.SetLabelsRequest; import com.google.container.v1.SetLegacyAbacRequest; import com.google.container.v1.SetLocationsRequest; import com.google.container.v1.SetLoggingServiceRequest; import com.google.container.v1.SetMaintenancePolicyRequest; import com.google.container.v1.SetMasterAuthRequest; import com.google.container.v1.SetMonitoringServiceRequest; import com.google.container.v1.SetNetworkPolicyRequest; import com.google.container.v1.SetNodePoolAutoscalingRequest; import com.google.container.v1.SetNodePoolManagementRequest; import com.google.container.v1.SetNodePoolSizeRequest; import com.google.container.v1.StartIPRotationRequest; import com.google.container.v1.UpdateClusterRequest; import com.google.container.v1.UpdateMasterRequest; import com.google.container.v1.UpdateNodePoolRequest; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link ClusterManagerClient}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (container.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. 
* * <p>For example, to set the total timeout of listClusters to 30 seconds: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ClusterManagerSettings.Builder clusterManagerSettingsBuilder = * ClusterManagerSettings.newBuilder(); * clusterManagerSettingsBuilder * .listClustersSettings() * .setRetrySettings( * clusterManagerSettingsBuilder.listClustersSettings().getRetrySettings().toBuilder() * .setTotalTimeout(Duration.ofSeconds(30)) * .build()); * ClusterManagerSettings clusterManagerSettings = clusterManagerSettingsBuilder.build(); * }</pre> */ @Generated("by gapic-generator-java") public class ClusterManagerSettings extends ClientSettings<ClusterManagerSettings> { /** Returns the object with the settings used for calls to listClusters. */ public UnaryCallSettings<ListClustersRequest, ListClustersResponse> listClustersSettings() { return ((ClusterManagerStubSettings) getStubSettings()).listClustersSettings(); } /** Returns the object with the settings used for calls to getCluster. */ public UnaryCallSettings<GetClusterRequest, Cluster> getClusterSettings() { return ((ClusterManagerStubSettings) getStubSettings()).getClusterSettings(); } /** Returns the object with the settings used for calls to createCluster. */ public UnaryCallSettings<CreateClusterRequest, Operation> createClusterSettings() { return ((ClusterManagerStubSettings) getStubSettings()).createClusterSettings(); } /** Returns the object with the settings used for calls to updateCluster. 
*/ public UnaryCallSettings<UpdateClusterRequest, Operation> updateClusterSettings() { return ((ClusterManagerStubSettings) getStubSettings()).updateClusterSettings(); } /** Returns the object with the settings used for calls to updateNodePool. */ public UnaryCallSettings<UpdateNodePoolRequest, Operation> updateNodePoolSettings() { return ((ClusterManagerStubSettings) getStubSettings()).updateNodePoolSettings(); } /** Returns the object with the settings used for calls to setNodePoolAutoscaling. */ public UnaryCallSettings<SetNodePoolAutoscalingRequest, Operation> setNodePoolAutoscalingSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setNodePoolAutoscalingSettings(); } /** Returns the object with the settings used for calls to setLoggingService. */ public UnaryCallSettings<SetLoggingServiceRequest, Operation> setLoggingServiceSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setLoggingServiceSettings(); } /** Returns the object with the settings used for calls to setMonitoringService. */ public UnaryCallSettings<SetMonitoringServiceRequest, Operation> setMonitoringServiceSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setMonitoringServiceSettings(); } /** Returns the object with the settings used for calls to setAddonsConfig. */ public UnaryCallSettings<SetAddonsConfigRequest, Operation> setAddonsConfigSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setAddonsConfigSettings(); } /** * Returns the object with the settings used for calls to setLocations. * * @deprecated This method is deprecated and will be removed in the next major version update. */ @Deprecated public UnaryCallSettings<SetLocationsRequest, Operation> setLocationsSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setLocationsSettings(); } /** Returns the object with the settings used for calls to updateMaster. 
*/ public UnaryCallSettings<UpdateMasterRequest, Operation> updateMasterSettings() { return ((ClusterManagerStubSettings) getStubSettings()).updateMasterSettings(); } /** Returns the object with the settings used for calls to setMasterAuth. */ public UnaryCallSettings<SetMasterAuthRequest, Operation> setMasterAuthSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setMasterAuthSettings(); } /** Returns the object with the settings used for calls to deleteCluster. */ public UnaryCallSettings<DeleteClusterRequest, Operation> deleteClusterSettings() { return ((ClusterManagerStubSettings) getStubSettings()).deleteClusterSettings(); } /** Returns the object with the settings used for calls to listOperations. */ public UnaryCallSettings<ListOperationsRequest, ListOperationsResponse> listOperationsSettings() { return ((ClusterManagerStubSettings) getStubSettings()).listOperationsSettings(); } /** Returns the object with the settings used for calls to get. */ public UnaryCallSettings<GetOperationRequest, Operation> getOperationSettings() { return ((ClusterManagerStubSettings) getStubSettings()).getOperationSettings(); } /** Returns the object with the settings used for calls to cancel. */ public UnaryCallSettings<CancelOperationRequest, Empty> cancelOperationSettings() { return ((ClusterManagerStubSettings) getStubSettings()).cancelOperationSettings(); } /** Returns the object with the settings used for calls to getServerConfig. */ public UnaryCallSettings<GetServerConfigRequest, ServerConfig> getServerConfigSettings() { return ((ClusterManagerStubSettings) getStubSettings()).getServerConfigSettings(); } /** Returns the object with the settings used for calls to getJSONWebKeys. */ public UnaryCallSettings<GetJSONWebKeysRequest, GetJSONWebKeysResponse> getJSONWebKeysSettings() { return ((ClusterManagerStubSettings) getStubSettings()).getJSONWebKeysSettings(); } /** Returns the object with the settings used for calls to listNodePools. 
*/ public UnaryCallSettings<ListNodePoolsRequest, ListNodePoolsResponse> listNodePoolsSettings() { return ((ClusterManagerStubSettings) getStubSettings()).listNodePoolsSettings(); } /** Returns the object with the settings used for calls to getNodePool. */ public UnaryCallSettings<GetNodePoolRequest, NodePool> getNodePoolSettings() { return ((ClusterManagerStubSettings) getStubSettings()).getNodePoolSettings(); } /** Returns the object with the settings used for calls to createNodePool. */ public UnaryCallSettings<CreateNodePoolRequest, Operation> createNodePoolSettings() { return ((ClusterManagerStubSettings) getStubSettings()).createNodePoolSettings(); } /** Returns the object with the settings used for calls to deleteNodePool. */ public UnaryCallSettings<DeleteNodePoolRequest, Operation> deleteNodePoolSettings() { return ((ClusterManagerStubSettings) getStubSettings()).deleteNodePoolSettings(); } /** Returns the object with the settings used for calls to completeNodePoolUpgrade. */ public UnaryCallSettings<CompleteNodePoolUpgradeRequest, Empty> completeNodePoolUpgradeSettings() { return ((ClusterManagerStubSettings) getStubSettings()).completeNodePoolUpgradeSettings(); } /** Returns the object with the settings used for calls to rollbackNodePoolUpgrade. */ public UnaryCallSettings<RollbackNodePoolUpgradeRequest, Operation> rollbackNodePoolUpgradeSettings() { return ((ClusterManagerStubSettings) getStubSettings()).rollbackNodePoolUpgradeSettings(); } /** Returns the object with the settings used for calls to setNodePoolManagement. */ public UnaryCallSettings<SetNodePoolManagementRequest, Operation> setNodePoolManagementSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setNodePoolManagementSettings(); } /** Returns the object with the settings used for calls to setLabels. 
*/ public UnaryCallSettings<SetLabelsRequest, Operation> setLabelsSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setLabelsSettings(); } /** Returns the object with the settings used for calls to setLegacyAbac. */ public UnaryCallSettings<SetLegacyAbacRequest, Operation> setLegacyAbacSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setLegacyAbacSettings(); } /** Returns the object with the settings used for calls to startIPRotation. */ public UnaryCallSettings<StartIPRotationRequest, Operation> startIPRotationSettings() { return ((ClusterManagerStubSettings) getStubSettings()).startIPRotationSettings(); } /** Returns the object with the settings used for calls to completeIPRotation. */ public UnaryCallSettings<CompleteIPRotationRequest, Operation> completeIPRotationSettings() { return ((ClusterManagerStubSettings) getStubSettings()).completeIPRotationSettings(); } /** Returns the object with the settings used for calls to setNodePoolSize. */ public UnaryCallSettings<SetNodePoolSizeRequest, Operation> setNodePoolSizeSettings() { return ((ClusterManagerStubSettings) getStubSettings()).setNodePoolSizeSettings(); } /** Returns the object with the settings used for calls to setNetworkPolicy. */ public UnaryCallSettings<SetNetworkPolicyRequest, Operation> setNetworkPolicySettings() { return ((ClusterManagerStubSettings) getStubSettings()).setNetworkPolicySettings(); } /** Returns the object with the settings used for calls to setMaintenancePolicy. */ public UnaryCallSettings<SetMaintenancePolicyRequest, Operation> setMaintenancePolicySettings() { return ((ClusterManagerStubSettings) getStubSettings()).setMaintenancePolicySettings(); } /** Returns the object with the settings used for calls to listUsableSubnetworks. 
*/ public PagedCallSettings< ListUsableSubnetworksRequest, ListUsableSubnetworksResponse, ListUsableSubnetworksPagedResponse> listUsableSubnetworksSettings() { return ((ClusterManagerStubSettings) getStubSettings()).listUsableSubnetworksSettings(); } public static final ClusterManagerSettings create(ClusterManagerStubSettings stub) throws IOException { return new ClusterManagerSettings.Builder(stub.toBuilder()).build(); } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return ClusterManagerStubSettings.defaultExecutorProviderBuilder(); } /** Returns the default service endpoint. */ public static String getDefaultEndpoint() { return ClusterManagerStubSettings.getDefaultEndpoint(); } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return ClusterManagerStubSettings.getDefaultServiceScopes(); } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return ClusterManagerStubSettings.defaultCredentialsProviderBuilder(); } /** Returns a builder for the default ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return ClusterManagerStubSettings.defaultGrpcTransportProviderBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return ClusterManagerStubSettings.defaultTransportChannelProvider(); } @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ClusterManagerStubSettings.defaultApiClientHeaderProviderBuilder(); } /** Returns a new builder for this class. 
*/ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } protected ClusterManagerSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); } /** Builder for ClusterManagerSettings. */ public static class Builder extends ClientSettings.Builder<ClusterManagerSettings, Builder> { protected Builder() throws IOException { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(ClusterManagerStubSettings.newBuilder(clientContext)); } protected Builder(ClusterManagerSettings settings) { super(settings.getStubSettings().toBuilder()); } protected Builder(ClusterManagerStubSettings.Builder stubSettings) { super(stubSettings); } private static Builder createDefault() { return new Builder(ClusterManagerStubSettings.newBuilder()); } public ClusterManagerStubSettings.Builder getStubSettingsBuilder() { return ((ClusterManagerStubSettings.Builder) getStubSettings()); } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods( getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater); return this; } /** Returns the builder for the settings used for calls to listClusters. */ public UnaryCallSettings.Builder<ListClustersRequest, ListClustersResponse> listClustersSettings() { return getStubSettingsBuilder().listClustersSettings(); } /** Returns the builder for the settings used for calls to getCluster. 
*/ public UnaryCallSettings.Builder<GetClusterRequest, Cluster> getClusterSettings() { return getStubSettingsBuilder().getClusterSettings(); } /** Returns the builder for the settings used for calls to createCluster. */ public UnaryCallSettings.Builder<CreateClusterRequest, Operation> createClusterSettings() { return getStubSettingsBuilder().createClusterSettings(); } /** Returns the builder for the settings used for calls to updateCluster. */ public UnaryCallSettings.Builder<UpdateClusterRequest, Operation> updateClusterSettings() { return getStubSettingsBuilder().updateClusterSettings(); } /** Returns the builder for the settings used for calls to updateNodePool. */ public UnaryCallSettings.Builder<UpdateNodePoolRequest, Operation> updateNodePoolSettings() { return getStubSettingsBuilder().updateNodePoolSettings(); } /** Returns the builder for the settings used for calls to setNodePoolAutoscaling. */ public UnaryCallSettings.Builder<SetNodePoolAutoscalingRequest, Operation> setNodePoolAutoscalingSettings() { return getStubSettingsBuilder().setNodePoolAutoscalingSettings(); } /** Returns the builder for the settings used for calls to setLoggingService. */ public UnaryCallSettings.Builder<SetLoggingServiceRequest, Operation> setLoggingServiceSettings() { return getStubSettingsBuilder().setLoggingServiceSettings(); } /** Returns the builder for the settings used for calls to setMonitoringService. */ public UnaryCallSettings.Builder<SetMonitoringServiceRequest, Operation> setMonitoringServiceSettings() { return getStubSettingsBuilder().setMonitoringServiceSettings(); } /** Returns the builder for the settings used for calls to setAddonsConfig. */ public UnaryCallSettings.Builder<SetAddonsConfigRequest, Operation> setAddonsConfigSettings() { return getStubSettingsBuilder().setAddonsConfigSettings(); } /** * Returns the builder for the settings used for calls to setLocations. 
* * @deprecated This method is deprecated and will be removed in the next major version update. */ @Deprecated public UnaryCallSettings.Builder<SetLocationsRequest, Operation> setLocationsSettings() { return getStubSettingsBuilder().setLocationsSettings(); } /** Returns the builder for the settings used for calls to updateMaster. */ public UnaryCallSettings.Builder<UpdateMasterRequest, Operation> updateMasterSettings() { return getStubSettingsBuilder().updateMasterSettings(); } /** Returns the builder for the settings used for calls to setMasterAuth. */ public UnaryCallSettings.Builder<SetMasterAuthRequest, Operation> setMasterAuthSettings() { return getStubSettingsBuilder().setMasterAuthSettings(); } /** Returns the builder for the settings used for calls to deleteCluster. */ public UnaryCallSettings.Builder<DeleteClusterRequest, Operation> deleteClusterSettings() { return getStubSettingsBuilder().deleteClusterSettings(); } /** Returns the builder for the settings used for calls to listOperations. */ public UnaryCallSettings.Builder<ListOperationsRequest, ListOperationsResponse> listOperationsSettings() { return getStubSettingsBuilder().listOperationsSettings(); } /** Returns the builder for the settings used for calls to get. */ public UnaryCallSettings.Builder<GetOperationRequest, Operation> getOperationSettings() { return getStubSettingsBuilder().getOperationSettings(); } /** Returns the builder for the settings used for calls to cancel. */ public UnaryCallSettings.Builder<CancelOperationRequest, Empty> cancelOperationSettings() { return getStubSettingsBuilder().cancelOperationSettings(); } /** Returns the builder for the settings used for calls to getServerConfig. */ public UnaryCallSettings.Builder<GetServerConfigRequest, ServerConfig> getServerConfigSettings() { return getStubSettingsBuilder().getServerConfigSettings(); } /** Returns the builder for the settings used for calls to getJSONWebKeys. 
*/ public UnaryCallSettings.Builder<GetJSONWebKeysRequest, GetJSONWebKeysResponse> getJSONWebKeysSettings() { return getStubSettingsBuilder().getJSONWebKeysSettings(); } /** Returns the builder for the settings used for calls to listNodePools. */ public UnaryCallSettings.Builder<ListNodePoolsRequest, ListNodePoolsResponse> listNodePoolsSettings() { return getStubSettingsBuilder().listNodePoolsSettings(); } /** Returns the builder for the settings used for calls to getNodePool. */ public UnaryCallSettings.Builder<GetNodePoolRequest, NodePool> getNodePoolSettings() { return getStubSettingsBuilder().getNodePoolSettings(); } /** Returns the builder for the settings used for calls to createNodePool. */ public UnaryCallSettings.Builder<CreateNodePoolRequest, Operation> createNodePoolSettings() { return getStubSettingsBuilder().createNodePoolSettings(); } /** Returns the builder for the settings used for calls to deleteNodePool. */ public UnaryCallSettings.Builder<DeleteNodePoolRequest, Operation> deleteNodePoolSettings() { return getStubSettingsBuilder().deleteNodePoolSettings(); } /** Returns the builder for the settings used for calls to completeNodePoolUpgrade. */ public UnaryCallSettings.Builder<CompleteNodePoolUpgradeRequest, Empty> completeNodePoolUpgradeSettings() { return getStubSettingsBuilder().completeNodePoolUpgradeSettings(); } /** Returns the builder for the settings used for calls to rollbackNodePoolUpgrade. */ public UnaryCallSettings.Builder<RollbackNodePoolUpgradeRequest, Operation> rollbackNodePoolUpgradeSettings() { return getStubSettingsBuilder().rollbackNodePoolUpgradeSettings(); } /** Returns the builder for the settings used for calls to setNodePoolManagement. */ public UnaryCallSettings.Builder<SetNodePoolManagementRequest, Operation> setNodePoolManagementSettings() { return getStubSettingsBuilder().setNodePoolManagementSettings(); } /** Returns the builder for the settings used for calls to setLabels. 
*/ public UnaryCallSettings.Builder<SetLabelsRequest, Operation> setLabelsSettings() { return getStubSettingsBuilder().setLabelsSettings(); } /** Returns the builder for the settings used for calls to setLegacyAbac. */ public UnaryCallSettings.Builder<SetLegacyAbacRequest, Operation> setLegacyAbacSettings() { return getStubSettingsBuilder().setLegacyAbacSettings(); } /** Returns the builder for the settings used for calls to startIPRotation. */ public UnaryCallSettings.Builder<StartIPRotationRequest, Operation> startIPRotationSettings() { return getStubSettingsBuilder().startIPRotationSettings(); } /** Returns the builder for the settings used for calls to completeIPRotation. */ public UnaryCallSettings.Builder<CompleteIPRotationRequest, Operation> completeIPRotationSettings() { return getStubSettingsBuilder().completeIPRotationSettings(); } /** Returns the builder for the settings used for calls to setNodePoolSize. */ public UnaryCallSettings.Builder<SetNodePoolSizeRequest, Operation> setNodePoolSizeSettings() { return getStubSettingsBuilder().setNodePoolSizeSettings(); } /** Returns the builder for the settings used for calls to setNetworkPolicy. */ public UnaryCallSettings.Builder<SetNetworkPolicyRequest, Operation> setNetworkPolicySettings() { return getStubSettingsBuilder().setNetworkPolicySettings(); } /** Returns the builder for the settings used for calls to setMaintenancePolicy. */ public UnaryCallSettings.Builder<SetMaintenancePolicyRequest, Operation> setMaintenancePolicySettings() { return getStubSettingsBuilder().setMaintenancePolicySettings(); } /** Returns the builder for the settings used for calls to listUsableSubnetworks. 
*/ public PagedCallSettings.Builder< ListUsableSubnetworksRequest, ListUsableSubnetworksResponse, ListUsableSubnetworksPagedResponse> listUsableSubnetworksSettings() { return getStubSettingsBuilder().listUsableSubnetworksSettings(); } @Override public ClusterManagerSettings build() throws IOException { return new ClusterManagerSettings(this); } } }
<dom-module id="x-gchild"> <template> <!-- styles can be in templates --> <style> :host-context(.wide) #target { border: 10px solid orange; } </style> <div id="target">x-gchild</div> </template> </dom-module> <script> Polymer({ is: 'x-gchild' }); </script> <dom-module id="x-child"> <template> <div id="simple">simple</div> <div id="complex1" class="scoped">complex1</div> <div id="complex2" selected>complex2</div> <div id="media">media</div> <div id="shadow" class="shadowTarget">shadowTarget</div> <div id="deep" class="deepTarget">deepTarget</div> <x-gchild id="gchild1"></x-gchild> <x-gchild id="gchild2" class="wide"></x-gchild> </template> </dom-module> <script> Polymer({ is: 'x-child', hostAttributes: { class: 'nug' } }); </script> <dom-module id="x-child2"> <style> :host(.wide) #target{ border: none; } </style> <template> <div id="target">x-child2</div> </template> </dom-module> <script> Polymer({ is: 'x-child2', _scopeCssViaAttr: true }); </script> <dom-module id="x-scope-class"> <template> <div id="scope">Trivial</div> </template> </dom-module> <script> Polymer({ is: 'x-scope-class' }); </script> <dom-module id="x-styled"> <style> :host { display: block; border: 1px solid orange; } :host(.wide) { border-width: 2px; } #simple { border: 3px solid orange; } .scoped, [selected] { border: 4px solid pink; } @media(max-width: 10000px) { .media { border: 5px solid brown; } } .container ::content > * { border: 6px solid navy; } x-child::shadow .shadowTarget { border: 7px solid tomato; } x-child /deep/ .deepTarget { border: 8px solid red; } #priority { border: 9px solid orange; } x-child2.wide::shadow #target { border: 12px solid brown; } .container1 > ::content > .content1 { border: 13px solid navy; } .container2 > ::content .content2 { border: 14px solid navy; } .computed { border: 15px solid orange; } .computeda { border: 20px solid orange; } #child { border: 16px solid tomato; display: block; } svg { margin-top: 20px; } #circle { fill: seagreen; stroke-width: 1px; 
stroke: tomato; } </style> <template> <content select=".blank"></content> <div id="simple">simple</div> <div id="complex1" class="scoped">complex1</div> <div id="complex2" selected>complex2</div> <div id="media" class="media">media</div> <div class="container1"> <content select=".content1"></content> </div> <div class="container2"> <content select=".content2"></content> </div> <div class="container"> <content></content> </div> <x-child id="child"></x-child> <div id="priority">priority</div> <x-child2 class="wide" id="child2"></x-child2> <div id="computed" class$="{{computeClass(aClass)}}">Computed</div> <div id="repeatContainer"> <template id="repeat" is="dom-repeat" items="{{items}}"> <a class$="{{aaClass}}">A Computed</a> </template> </div> <svg height="25" width="25"> <circle id="circle" cx="12" cy="12" r="10"></circle> </svg> <x-scope-class id="scopeClass"></x-scope-class> </template> </dom-module> <script> Polymer({ is: 'x-styled', properties: { items: {value: [{}]} }, computeClass: function(className) { return className; } }); </script> <dom-module id="x-button"> <style> :host { border: 10px solid beige; } :host(.special) { border: 11px solid beige; } </style> <template> Button! 
</template> </dom-module> <script> Polymer({ is: 'x-button', extends: 'button' }); </script> <dom-module id="x-Mixed-Case"> <style> :host { border: 13px solid beige; } </style> <template> Mixed-Case </template> <script> Polymer({ is: 'x-Mixed-Case' }); </script> </dom-module> <dom-module id="x-Mixed-Case-Button"> <style> :host { border: 14px solid beige; } </style> <template> Mixed-Case </template> <script> Polymer({ is: 'x-Mixed-Case-Button', extends: 'button' }); </script> </dom-module> <template id="dynamic"> <div class="added"> Added <div class="sub-added"> Sub-added </div> </div> </div> </template> <dom-module id="x-dynamic-scope"> <style> .added { border: 17px solid beige; } .sub-added { border: 18px solid #fafafa; } </style> <template> <div id="container"></div> </template> </dom-module> <script> (function() { var doc = document._currentScript.ownerDocument; var dynamic = doc.querySelector('template#dynamic'); Polymer({ is: 'x-dynamic-scope', ready: function() { // setup node for scope watching this.scopeSubtree(this.$.container, true); // simulate 3rd party action by using normal dom to add to element. var dom = document.importNode(dynamic.content, true); this.$.container.appendChild(dom); } }); })(); </script>
"""Defines input readers for MapReduce.""" __all__ = [ "AbstractDatastoreInputReader", "ALLOW_CHECKPOINT", "BadReaderParamsError", "BlobstoreLineInputReader", "BlobstoreZipInputReader", "BlobstoreZipLineInputReader", "COUNTER_IO_READ_BYTES", "COUNTER_IO_READ_MSEC", "DatastoreEntityInputReader", "DatastoreInputReader", "DatastoreKeyInputReader", "FileInputReader", "RandomStringInputReader", "RawDatastoreInputReader", "Error", "InputReader", "LogInputReader", "NamespaceInputReader", "RecordsReader", ] # pylint: disable=g-bad-name # pylint: disable=protected-access import base64 import copy import logging import pickle import random import string import StringIO import time import zipfile from google.net.proto import ProtocolBuffer from google.appengine.ext import ndb from google.appengine.api import datastore from google.appengine.api import files from google.appengine.api import logservice from google.appengine.api.files import file_service_pb from google.appengine.api.logservice import log_service_pb from google.appengine.ext import blobstore from google.appengine.ext import db from google.appengine.ext import key_range from google.appengine.ext.db import metadata from mapreduce import context from mapreduce import datastore_range_iterators as db_iters from mapreduce import errors from mapreduce import file_format_parser from mapreduce import file_format_root from mapreduce import json_util from mapreduce import key_ranges from mapreduce import model from mapreduce import namespace_range from mapreduce import operation from mapreduce import property_range from mapreduce import records from mapreduce import util # pylint: disable=g-import-not-at-top # TODO(user): Cleanup imports if/when cloudstorage becomes part of runtime. try: # Check if the full cloudstorage package exists. The stub part is in runtime. 
  import cloudstorage
  if hasattr(cloudstorage, "_STUB"):
    # Only the runtime stub is present; treat cloudstorage as unavailable.
    cloudstorage = None
except ImportError:
  pass  # CloudStorage library not available


# Classes moved to errors module. Copied here for compatibility.
Error = errors.Error
BadReaderParamsError = errors.BadReaderParamsError


# Counter name for number of bytes read.
COUNTER_IO_READ_BYTES = "io-read-bytes"

# Counter name for milliseconds spent reading data.
COUNTER_IO_READ_MSEC = "io-read-msec"

# Special value that can be yielded by InputReaders if they want to give the
# framework an opportunity to save the state of the mapreduce without having
# to yield an actual value to the handler.
ALLOW_CHECKPOINT = object()


class InputReader(json_util.JsonMixin):
  """Abstract base class for input readers.

  InputReaders have the following properties:
   * They are created by using the split_input method to generate a set of
     InputReaders from a MapperSpec.
   * They generate inputs to the mapper via the iterator interface.
   * After creation, they can be serialized and resumed using the JsonMixin
     interface.
   * They are cast to string for a user-readable description; it may be
     valuable to implement __str__.
  """

  # When expand_parameters is False, then value yielded by reader is passed
  # to handler as is. If it's true, then *value is passed, expanding arguments
  # and letting handler be a multi-parameter function.
  expand_parameters = False

  # Mapreduce parameters.
  _APP_PARAM = "_app"
  NAMESPACE_PARAM = "namespace"
  NAMESPACES_PARAM = "namespaces"  # Obsolete.

  def __iter__(self):
    return self

  def next(self):
    """Returns the next input from this input reader as a key, value pair.

    Returns:
      The next input from this input reader.
    """
    raise NotImplementedError("next() not implemented in %s" % self.__class__)

  @classmethod
  def from_json(cls, input_shard_state):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      input_shard_state: The InputReader state as a dict-like object.
    Returns:
      An instance of the InputReader configured using the values of json.
    """
    raise NotImplementedError("from_json() not implemented in %s" % cls)

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    raise NotImplementedError("to_json() not implemented in %s" %
                              self.__class__)

  @classmethod
  def split_input(cls, mapper_spec):
    """Returns a list of input readers.

    This method creates a list of input readers, each for one shard.
    It attempts to split inputs among readers evenly.

    Args:
      mapper_spec: model.MapperSpec specifies the inputs and additional
        parameters to define the behavior of input readers.

    Returns:
      A list of InputReaders. None or [] when no input data can be found.
    """
    raise NotImplementedError("split_input() not implemented in %s" % cls)

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Input reader parameters are expected to be passed as "input_reader"
    subdictionary in mapper_spec.params.

    Pre 1.6.4 API mixes input reader parameters with all other parameters. Thus
    to be compatible, input reader check mapper_spec.params as well and
    issue a warning if "input_reader" subdictionary is not present.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Input reader class mismatch")


def _get_params(mapper_spec, allowed_keys=None, allow_old=True):
  """Obtain input reader parameters.

  Utility function for input readers implementation. Fetches parameters
  from mapreduce specification giving appropriate usage warnings.

  Args:
    mapper_spec: The MapperSpec for the job
    allowed_keys: set of all allowed keys in parameters as strings. If it is
      not None, then parameters are expected to be in a separate "input_reader"
      subdictionary of mapper_spec parameters.
    allow_old: Allow parameters to exist outside of the input_reader
      subdictionary for compatibility.

  Returns:
    mapper parameters as dict

  Raises:
    BadReaderParamsError: if parameters are invalid/missing or not allowed.
  """
  if "input_reader" not in mapper_spec.params:
    message = ("Input reader's parameters should be specified in "
               "input_reader subdictionary.")
    if not allow_old or allowed_keys:
      raise errors.BadReaderParamsError(message)
    params = mapper_spec.params
    # Normalize keys to str (JSON deserialization may deliver unicode keys).
    params = dict((str(n), v) for n, v in params.iteritems())
  else:
    if not isinstance(mapper_spec.params.get("input_reader"), dict):
      raise errors.BadReaderParamsError(
          "Input reader parameters should be a dictionary")
    params = mapper_spec.params.get("input_reader")
    params = dict((str(n), v) for n, v in params.iteritems())

  if allowed_keys:
    params_diff = set(params.keys()) - allowed_keys
    if params_diff:
      raise errors.BadReaderParamsError(
          "Invalid input_reader parameters: %s" % ",".join(params_diff))
  return params


class FileInputReader(InputReader):
  """Reader to read Files API files of user specified format.

  This class currently only supports Google Storage files. It will be extended
  to support blobstore files in the future.

  Reader Parameters:
  files: a list of filenames or filename patterns.
    filename must be of format '/gs/bucket/filename'.
    filename pattern has format '/gs/bucket/prefix*'.
    filename pattern will be expanded to filenames with the given prefix.
    Please see parseGlob in the file api.files.gs.py which is included in the
    App Engine SDK for supported patterns.

    Example:
      ["/gs/bucket1/file1", "/gs/bucket2/*", "/gs/bucket3/p*"]
      includes "file1", all files under bucket2, and files under bucket3 with
      a prefix "p" in its name.

  format: format string determines what your map function gets as its input.
    format string can be "lines", "bytes", "zip", or a cascade of them plus
    optional parameters. See file_formats.FORMATS for all supported formats.
    See file_format_parser._FileFormatParser for format string syntax.
    Example:
      "lines": your map function gets files' contents line by line.
      "bytes": your map function gets files' contents entirely.
      "zip": InputReader unzips files and feeds your map function each of
        the archive's member files as a whole.
      "zip[bytes]: same as above.
      "zip[lines]": InputReader unzips files and feeds your map function
        files' contents line by line.
      "zip[lines(encoding=utf32)]": InputReader unzips files, reads each
        file with utf32 encoding and feeds your map function line by line.
      "base64[zip[lines(encoding=utf32)]]: InputReader decodes files with
        base64 encoding, unzips each file, reads each of them with utf32
        encoding and feeds your map function line by line.

    Note that "encoding" only teaches InputReader how to interpret files.
    The input your map function gets is always a Python str.
  """

  # Reader Parameters
  FILES_PARAM = "files"
  FORMAT_PARAM = "format"

  def __init__(self, format_root):
    """Initialize input reader.

    Args:
      format_root: a FileFormatRoot instance.
    """
    self._file_format_root = format_root

  def __iter__(self):
    """Inherit docs."""
    return self

  def next(self):
    """Inherit docs."""
    ctx = context.get()
    start_time = time.time()
    content = self._file_format_root.next().read()
    if ctx:
      # Report read latency and bytes consumed to the mapreduce counters.
      operation.counters.Increment(
          COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx)
      operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx)
    return content

  @classmethod
  def split_input(cls, mapper_spec):
    """Inherit docs."""
    params = _get_params(mapper_spec)

    # Expand potential file patterns to a list of filenames.
    filenames = []
    for f in params[cls.FILES_PARAM]:
      parsedName = files.gs.parseGlob(f)
      if isinstance(parsedName, tuple):
        # parseGlob returned a (bucket, prefix) pair; expand to real files.
        filenames.extend(files.gs.listdir(parsedName[0],
                                          {"prefix": parsedName[1]}))
      else:
        filenames.append(parsedName)

    file_format_roots = file_format_root.split(filenames,
                                               params[cls.FORMAT_PARAM],
                                               mapper_spec.shard_count)

    if file_format_roots is None:
      return []
    return [cls(root) for root in file_format_roots]

  @classmethod
  def validate(cls, mapper_spec):
    """Inherit docs."""
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Mapper input reader class mismatch")

    # Check parameters.
    params = _get_params(mapper_spec)
    if cls.FILES_PARAM not in params:
      raise BadReaderParamsError("Must specify %s" % cls.FILES_PARAM)
    if cls.FORMAT_PARAM not in params:
      raise BadReaderParamsError("Must specify %s" % cls.FORMAT_PARAM)

    format_string = params[cls.FORMAT_PARAM]
    if not isinstance(format_string, basestring):
      raise BadReaderParamsError("format should be string but is %s" %
                                 cls.FORMAT_PARAM)
    try:
      file_format_parser.parse(format_string)
    except ValueError, e:
      raise BadReaderParamsError(e)

    paths = params[cls.FILES_PARAM]
    if not (paths and isinstance(paths, list)):
      raise BadReaderParamsError("files should be a list of filenames.")

    # Further validations are done by parseGlob().
    try:
      for path in paths:
        files.gs.parseGlob(path)
    except files.InvalidFileNameError:
      # Python 2: `path` retains the offending value after the loop breaks.
      raise BadReaderParamsError("Invalid filename %s." % path)

  @classmethod
  def from_json(cls, json):
    """Inherit docs."""
    return cls(
        file_format_root.FileFormatRoot.from_json(json["file_format_root"]))

  def to_json(self):
    """Inherit docs."""
    return {"file_format_root": self._file_format_root.to_json()}


class AbstractDatastoreInputReader(InputReader):
  """Abstract class for datastore input readers."""

  # Number of entities to fetch at once while doing scanning.
  _BATCH_SIZE = 50

  # Maximum number of shards we'll create.
_MAX_SHARD_COUNT = 256 # The maximum number of namespaces that will be sharded by datastore key # before switching to a strategy where sharding is done lexographically by # namespace. MAX_NAMESPACES_FOR_KEY_SHARD = 10 # reader parameters. ENTITY_KIND_PARAM = "entity_kind" KEYS_ONLY_PARAM = "keys_only" BATCH_SIZE_PARAM = "batch_size" KEY_RANGE_PARAM = "key_range" FILTERS_PARAM = "filters" _KEY_RANGE_ITER_CLS = db_iters.AbstractKeyRangeIterator def __init__(self, iterator): """Create new DatastoreInputReader object. This is internal constructor. Use split_input to create readers instead. Args: iterator: an iterator that generates objects for this input reader. """ self._iter = iterator def __iter__(self): """Yields whatever internal iterator yields.""" for o in self._iter: yield o def __str__(self): """Returns the string representation of this InputReader.""" return repr(self._iter) def to_json(self): """Serializes input reader to json compatible format. Returns: all the data in json-compatible map. """ return self._iter.to_json() @classmethod def from_json(cls, json): """Create new DatastoreInputReader from json, encoded by to_json. Args: json: json representation of DatastoreInputReader. Returns: an instance of DatastoreInputReader with all data deserialized from json. 
""" return cls(db_iters.RangeIteratorFactory.from_json(json)) @classmethod def _get_query_spec(cls, mapper_spec): """Construct a model.QuerySpec from model.MapperSpec.""" params = _get_params(mapper_spec) entity_kind = params[cls.ENTITY_KIND_PARAM] filters = params.get(cls.FILTERS_PARAM) app = params.get(cls._APP_PARAM) ns = params.get(cls.NAMESPACE_PARAM) return model.QuerySpec( entity_kind=cls._get_raw_entity_kind(entity_kind), keys_only=bool(params.get(cls.KEYS_ONLY_PARAM, False)), filters=filters, batch_size=int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE)), model_class_path=entity_kind, app=app, ns=ns) @classmethod def split_input(cls, mapper_spec): """Inherit doc.""" shard_count = mapper_spec.shard_count query_spec = cls._get_query_spec(mapper_spec) namespaces = None if query_spec.ns is not None: k_ranges = cls._to_key_ranges_by_shard( query_spec.app, [query_spec.ns], shard_count, query_spec) else: ns_keys = namespace_range.get_namespace_keys( query_spec.app, cls.MAX_NAMESPACES_FOR_KEY_SHARD+1) # No namespace means the app may have some data but those data are not # visible yet. Just return. if not ns_keys: return # If the number of ns is small, we shard each ns by key and assign each # shard a piece of a ns. elif len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD: namespaces = [ns_key.name() or "" for ns_key in ns_keys] k_ranges = cls._to_key_ranges_by_shard( query_spec.app, namespaces, shard_count, query_spec) # When number of ns is large, we can only split lexicographically by ns. 
      else:
        ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,
                                                         contiguous=False,
                                                         can_query=lambda: True,
                                                         _app=query_spec.app)
        k_ranges = [key_ranges.KeyRangesFactory.create_from_ns_range(ns_range)
                    for ns_range in ns_ranges]

    iters = [db_iters.RangeIteratorFactory.create_key_ranges_iterator(
        r, query_spec, cls._KEY_RANGE_ITER_CLS) for r in k_ranges]

    return [cls(i) for i in iters]

  @classmethod
  def _to_key_ranges_by_shard(cls, app, namespaces, shard_count, query_spec):
    """Get a list of key_ranges.KeyRanges objects, one for each shard.

    This method uses scatter index to split each namespace into pieces
    and assign those pieces to shards.

    Args:
      app: app_id in str.
      namespaces: a list of namespaces in str.
      shard_count: number of shards to split.
      query_spec: model.QuerySpec.

    Returns:
      a list of key_ranges.KeyRanges objects.
    """
    key_ranges_by_ns = []
    # Split each ns into n splits. If a ns doesn't have enough scatter to
    # split into n, the last few splits are None.
    for namespace in namespaces:
      ranges = cls._split_ns_by_scatter(
          shard_count,
          namespace,
          query_spec.entity_kind,
          app)
      # The nth split of each ns will be assigned to the nth shard.
      # Shuffle so that None are not all by the end.
      random.shuffle(ranges)
      key_ranges_by_ns.append(ranges)

    # KeyRanges from different namespaces might be very different in size.
    # Use round robin to make sure each shard can have at most one split
    # or a None from a ns.
    ranges_by_shard = [[] for _ in range(shard_count)]
    for ranges in key_ranges_by_ns:
      for i, k_range in enumerate(ranges):
        if k_range:
          ranges_by_shard[i].append(k_range)

    key_ranges_by_shard = []
    for ranges in ranges_by_shard:
      if ranges:
        key_ranges_by_shard.append(key_ranges.KeyRangesFactory.create_from_list(
            ranges))
    return key_ranges_by_shard

  @classmethod
  def _split_ns_by_scatter(cls,
                           shard_count,
                           namespace,
                           raw_entity_kind,
                           app):
    """Split a namespace by scatter index into key_range.KeyRange.

    TODO(user): Power this with key_range.KeyRange.compute_split_points.
    Args:
      shard_count: number of shards.
      namespace: namespace name to split. str.
      raw_entity_kind: low level datastore API entity kind.
      app: app id in str.

    Returns:
      A list of key_range.KeyRange objects. If there are not enough entities to
      splits into requested shards, the returned list will contain KeyRanges
      ordered lexicographically with any Nones appearing at the end.
    """
    if shard_count == 1:
      # With one shard we don't need to calculate any split points at all.
      return [key_range.KeyRange(namespace=namespace, _app=app)]

    # Sample the reserved __scatter__ index to find evenly distributed keys.
    ds_query = datastore.Query(kind=raw_entity_kind,
                               namespace=namespace,
                               _app=app,
                               keys_only=True)
    ds_query.Order("__scatter__")
    oversampling_factor = 32
    random_keys = ds_query.Get(shard_count * oversampling_factor)

    if not random_keys:
      # There are no entities with scatter property. We have no idea
      # how to split.
      return ([key_range.KeyRange(namespace=namespace, _app=app)] +
              [None] * (shard_count - 1))

    random_keys.sort()

    if len(random_keys) >= shard_count:
      # We've got a lot of scatter values. Sample them down.
      random_keys = cls._choose_split_points(random_keys, shard_count)

    k_ranges = []

    # Open-ended range before the first split point.
    k_ranges.append(key_range.KeyRange(
        key_start=None,
        key_end=random_keys[0],
        direction=key_range.KeyRange.ASC,
        include_start=False,
        include_end=False,
        namespace=namespace,
        _app=app))

    for i in range(0, len(random_keys) - 1):
      k_ranges.append(key_range.KeyRange(
          key_start=random_keys[i],
          key_end=random_keys[i+1],
          direction=key_range.KeyRange.ASC,
          include_start=True,
          include_end=False,
          namespace=namespace,
          _app=app))

    # Open-ended range after the last split point.
    k_ranges.append(key_range.KeyRange(
        key_start=random_keys[-1],
        key_end=None,
        direction=key_range.KeyRange.ASC,
        include_start=True,
        include_end=False,
        namespace=namespace,
        _app=app))

    if len(k_ranges) < shard_count:
      # We need to have as many shards as it was requested. Add some Nones.
k_ranges += [None] * (shard_count - len(k_ranges)) return k_ranges @classmethod def _choose_split_points(cls, sorted_keys, shard_count): """Returns the best split points given a random set of datastore.Keys.""" assert len(sorted_keys) >= shard_count index_stride = len(sorted_keys) / float(shard_count) return [sorted_keys[int(round(index_stride * i))] for i in range(1, shard_count)] @classmethod def validate(cls, mapper_spec): """Inherit docs.""" params = _get_params(mapper_spec) if cls.ENTITY_KIND_PARAM not in params: raise BadReaderParamsError("Missing input reader parameter 'entity_kind'") if cls.BATCH_SIZE_PARAM in params: try: batch_size = int(params[cls.BATCH_SIZE_PARAM]) if batch_size < 1: raise BadReaderParamsError("Bad batch size: %s" % batch_size) except ValueError, e: raise BadReaderParamsError("Bad batch size: %s" % e) try: bool(params.get(cls.KEYS_ONLY_PARAM, False)) except: raise BadReaderParamsError("keys_only expects a boolean value but got %s", params[cls.KEYS_ONLY_PARAM]) if cls.NAMESPACE_PARAM in params: if not isinstance(params[cls.NAMESPACE_PARAM], (str, unicode, type(None))): raise BadReaderParamsError( "Expected a single namespace string") if cls.NAMESPACES_PARAM in params: raise BadReaderParamsError("Multiple namespaces are no longer supported") if cls.FILTERS_PARAM in params: filters = params[cls.FILTERS_PARAM] if not isinstance(filters, list): raise BadReaderParamsError("Expected list for filters parameter") for f in filters: if not isinstance(f, (tuple, list)): raise BadReaderParamsError("Filter should be a tuple or list: %s", f) if len(f) != 3: raise BadReaderParamsError("Filter should be a 3-tuple: %s", f) prop, op, _ = f if not isinstance(prop, basestring): raise BadReaderParamsError("Property should be string: %s", prop) if not isinstance(op, basestring): raise BadReaderParamsError("Operator should be string: %s", op) @classmethod def _get_raw_entity_kind(cls, entity_kind_or_model_classpath): """Returns the entity kind to use with low 
level datastore calls. Args: entity_kind_or_model_classpath: user specified entity kind or model classpath. Returns: the entity kind in str to use with low level datastore calls. """ return entity_kind_or_model_classpath class RawDatastoreInputReader(AbstractDatastoreInputReader): """Iterates over an entity kind and yields datastore.Entity.""" _KEY_RANGE_ITER_CLS = db_iters.KeyRangeEntityIterator @classmethod def validate(cls, mapper_spec): """Inherit docs.""" super(RawDatastoreInputReader, cls).validate(mapper_spec) params = _get_params(mapper_spec) entity_kind = params[cls.ENTITY_KIND_PARAM] if "." in entity_kind: logging.warning( ". detected in entity kind %s specified for reader %s." "Assuming entity kind contains the dot.", entity_kind, cls.__name__) if cls.FILTERS_PARAM in params: filters = params[cls.FILTERS_PARAM] for f in filters: if f[1] != "=": raise BadReaderParamsError( "Only equality filters are supported: %s", f) class DatastoreInputReader(AbstractDatastoreInputReader): """Iterates over a Model and yields model instances. Supports both db.model and ndb.model. """ _KEY_RANGE_ITER_CLS = db_iters.KeyRangeModelIterator @classmethod def _get_raw_entity_kind(cls, model_classpath): entity_type = util.for_name(model_classpath) if isinstance(entity_type, db.Model): return entity_type.kind() elif isinstance(entity_type, (ndb.Model, ndb.MetaModel)): # pylint: disable=protected-access return entity_type._get_kind() else: return util.get_short_name(model_classpath) @classmethod def validate(cls, mapper_spec): """Inherit docs.""" super(DatastoreInputReader, cls).validate(mapper_spec) params = _get_params(mapper_spec) entity_kind = params[cls.ENTITY_KIND_PARAM] # Fail fast if Model cannot be located. 
try: model_class = util.for_name(entity_kind) except ImportError, e: raise BadReaderParamsError("Bad entity kind: %s" % e) if cls.FILTERS_PARAM in params: filters = params[cls.FILTERS_PARAM] if issubclass(model_class, db.Model): cls._validate_filters(filters, model_class) else: cls._validate_filters_ndb(filters, model_class) property_range.PropertyRange(filters, entity_kind) @classmethod def _validate_filters(cls, filters, model_class): """Validate user supplied filters. Validate filters are on existing properties and filter values have valid semantics. Args: filters: user supplied filters. Each filter should be a list or tuple of format (<property_name_as_str>, <query_operator_as_str>, <value_of_certain_type>). Value type is up to the property's type. model_class: the db.Model class for the entity type to apply filters on. Raises: BadReaderParamsError: if any filter is invalid in any way. """ if not filters: return properties = model_class.properties() for f in filters: prop, _, val = f if prop not in properties: raise errors.BadReaderParamsError( "Property %s is not defined for entity type %s", prop, model_class.kind()) # Validate the value of each filter. We need to know filters have # valid value to carry out splits. try: properties[prop].validate(val) except db.BadValueError, e: raise errors.BadReaderParamsError(e) @classmethod # pylint: disable=protected-access def _validate_filters_ndb(cls, filters, model_class): """Validate ndb.Model filters.""" if not filters: return properties = model_class._properties for f in filters: prop, _, val = f if prop not in properties: raise errors.BadReaderParamsError( "Property %s is not defined for entity type %s", prop, model_class._get_kind()) # Validate the value of each filter. We need to know filters have # valid value to carry out splits. 
      try:
        properties[prop]._do_validate(val)
      except db.BadValueError, e:
        raise errors.BadReaderParamsError(e)

  @classmethod
  def split_input(cls, mapper_spec):
    """Inherit docs."""
    shard_count = mapper_spec.shard_count
    query_spec = cls._get_query_spec(mapper_spec)

    if not property_range.should_shard_by_property_range(query_spec.filters):
      return super(DatastoreInputReader, cls).split_input(mapper_spec)

    p_range = property_range.PropertyRange(query_spec.filters,
                                           query_spec.model_class_path)
    p_ranges = p_range.split(shard_count)

    # User specified a namespace.
    if query_spec.ns:
      ns_range = namespace_range.NamespaceRange(
          namespace_start=query_spec.ns,
          namespace_end=query_spec.ns,
          _app=query_spec.app)
      ns_ranges = [copy.copy(ns_range) for _ in p_ranges]
    else:
      ns_keys = namespace_range.get_namespace_keys(
          query_spec.app, cls.MAX_NAMESPACES_FOR_KEY_SHARD+1)
      if not ns_keys:
        return
      # User doesn't specify ns but the number of ns is small.
      # We still split by property range.
      if len(ns_keys) <= cls.MAX_NAMESPACES_FOR_KEY_SHARD:
        ns_ranges = [namespace_range.NamespaceRange(_app=query_spec.app)
                     for _ in p_ranges]
      # Lots of namespaces. Split by ns.
      else:
        ns_ranges = namespace_range.NamespaceRange.split(n=shard_count,
                                                         contiguous=False,
                                                         can_query=lambda: True,
                                                         _app=query_spec.app)
        p_ranges = [copy.copy(p_range) for _ in ns_ranges]

    # One (property range, namespace range) pair per shard.
    assert len(p_ranges) == len(ns_ranges)

    iters = [
        db_iters.RangeIteratorFactory.create_property_range_iterator(
            p, ns, query_spec) for p, ns in zip(p_ranges, ns_ranges)]
    return [cls(i) for i in iters]


class DatastoreKeyInputReader(RawDatastoreInputReader):
  """Iterate over an entity kind and yields datastore.Key."""

  _KEY_RANGE_ITER_CLS = db_iters.KeyRangeKeyIterator


# For backward compatibility.
DatastoreEntityInputReader = RawDatastoreInputReader


# TODO(user): Remove this after the only dependency GroomerMarkReader is
class _OldAbstractDatastoreInputReader(InputReader):
  """Abstract base class for classes that iterate over datastore entities.
  Concrete subclasses must implement _iter_key_range(self, k_range). See the
  docstring for that method for details.
  """

  # Number of entities to fetch at once while doing scanning.
  _BATCH_SIZE = 50

  # Maximum number of shards we'll create.
  _MAX_SHARD_COUNT = 256

  # __scatter__ oversampling factor
  _OVERSAMPLING_FACTOR = 32

  # The maximum number of namespaces that will be sharded by datastore key
  # before switching to a strategy where sharding is done lexographically by
  # namespace.
  MAX_NAMESPACES_FOR_KEY_SHARD = 10

  # Mapreduce parameters.
  ENTITY_KIND_PARAM = "entity_kind"
  KEYS_ONLY_PARAM = "keys_only"
  BATCH_SIZE_PARAM = "batch_size"
  KEY_RANGE_PARAM = "key_range"
  NAMESPACE_RANGE_PARAM = "namespace_range"
  CURRENT_KEY_RANGE_PARAM = "current_key_range"
  FILTERS_PARAM = "filters"

  # TODO(user): Add support for arbitrary queries. It's not possible to
  # support them without cursors since right now you can't even serialize query
  # definition.
  # pylint: disable=redefined-outer-name
  def __init__(self,
               entity_kind,
               key_ranges=None,
               ns_range=None,
               batch_size=_BATCH_SIZE,
               current_key_range=None,
               filters=None):
    """Create new AbstractDatastoreInputReader object.

    This is internal constructor. Use split_query in a concrete class instead.

    Args:
      entity_kind: entity kind as string.
      key_ranges: a sequence of key_range.KeyRange instances to process. Only
          one of key_ranges or ns_range can be non-None.
      ns_range: a namespace_range.NamespaceRange to process. Only one of
          key_ranges or ns_range can be non-None.
      batch_size: size of read batch as int.
      current_key_range: the current key_range.KeyRange being processed.
      filters: optional list of filters to apply to the query. Each filter is
        a tuple: (<property_name_as_str>, <query_operation_as_str>, <value>).
        User filters are applied first.
""" assert key_ranges is not None or ns_range is not None, ( "must specify one of 'key_ranges' or 'ns_range'") assert key_ranges is None or ns_range is None, ( "can't specify both 'key_ranges ' and 'ns_range'") self._entity_kind = entity_kind # Reverse the KeyRanges so they can be processed in order as a stack of # work items. self._key_ranges = key_ranges and list(reversed(key_ranges)) self._ns_range = ns_range self._batch_size = int(batch_size) self._current_key_range = current_key_range self._filters = filters @classmethod def _get_raw_entity_kind(cls, entity_kind): if "." in entity_kind: logging.warning( ". detected in entity kind %s specified for reader %s." "Assuming entity kind contains the dot.", entity_kind, cls.__name__) return entity_kind def __iter__(self): """Iterates over the given KeyRanges or NamespaceRange. This method iterates over the given KeyRanges or NamespaceRange and sets the self._current_key_range to the KeyRange currently being processed. It then delegates to the _iter_key_range method to yield that actual results. Yields: Forwards the objects yielded by the subclasses concrete _iter_key_range() method. The caller must consume the result yielded because self.to_json() will not include it. """ if self._key_ranges is not None: for o in self._iter_key_ranges(): yield o elif self._ns_range is not None: for o in self._iter_ns_range(): yield o else: assert False, "self._key_ranges and self._ns_range are both None" def _iter_key_ranges(self): """Iterates over self._key_ranges, delegating to self._iter_key_range().""" while True: if self._current_key_range is None: if self._key_ranges: self._current_key_range = self._key_ranges.pop() # The most recently popped key_range may be None, so continue here # to find the next keyrange that's valid. continue else: break for key, o in self._iter_key_range( copy.deepcopy(self._current_key_range)): # The caller must consume yielded values so advancing the KeyRange # before yielding is safe. 
        self._current_key_range.advance(key)
        yield o
      self._current_key_range = None

  def _iter_ns_range(self):
    """Iterates over self._ns_range, delegating to self._iter_key_range()."""
    while True:
      if self._current_key_range is None:
        query = self._ns_range.make_datastore_query()
        namespace_result = query.Get(1)
        if not namespace_result:
          break

        namespace = namespace_result[0].name() or ""
        self._current_key_range = key_range.KeyRange(
            namespace=namespace, _app=self._ns_range.app)
        # Give the framework a chance to checkpoint before scanning the
        # newly selected namespace.
        yield ALLOW_CHECKPOINT

      for key, o in self._iter_key_range(
          copy.deepcopy(self._current_key_range)):
        # The caller must consume yielded values so advancing the KeyRange
        # before yielding is safe.
        self._current_key_range.advance(key)
        yield o

      if (self._ns_range.is_single_namespace or
          self._current_key_range.namespace == self._ns_range.namespace_end):
        break
      self._ns_range = self._ns_range.with_start_after(
          self._current_key_range.namespace)
      self._current_key_range = None

  def _iter_key_range(self, k_range):
    """Yields a db.Key and the value that should be yielded by self.__iter__().

    Args:
      k_range: The key_range.KeyRange to iterate over.

    Yields:
      A 2-tuple containing the last db.Key processed and the value that should
      be yielded by __iter__. The returned db.Key will be used to determine the
      InputReader's current position in self._current_key_range.
    """
    raise NotImplementedError("_iter_key_range() not implemented in %s" %
                              self.__class__)

  def __str__(self):
    """Returns the string representation of this InputReader."""
    if self._ns_range is None:
      return repr(self._key_ranges)
    else:
      return repr(self._ns_range)

  @classmethod
  def _choose_split_points(cls, sorted_keys, shard_count):
    """Returns the best split points given a random set of db.Keys."""
    assert len(sorted_keys) >= shard_count
    index_stride = len(sorted_keys) / float(shard_count)
    return [sorted_keys[int(round(index_stride * i))]
            for i in range(1, shard_count)]

  # TODO(user): use query splitting functionality when it becomes available
  # instead.
  @classmethod
  def _split_input_from_namespace(cls, app, namespace, entity_kind,
                                  shard_count):
    """Helper for _split_input_from_params.

    If there are not enough Entities to make all of the given shards, the
    returned list of KeyRanges will include Nones. The returned list will
    contain KeyRanges ordered lexicographically with any Nones appearing at the
    end.

    Args:
      app: the app.
      namespace: the namespace.
      entity_kind: entity kind as string.
      shard_count: the number of shards.

    Returns:
      KeyRange objects.
    """
    raw_entity_kind = cls._get_raw_entity_kind(entity_kind)
    if shard_count == 1:
      # With one shard we don't need to calculate any splitpoints at all.
      return [key_range.KeyRange(namespace=namespace, _app=app)]

    # Keys-only query ordered by the __scatter__ property gives a random
    # sample of the kind's keyspace to derive split points from.
    ds_query = datastore.Query(kind=raw_entity_kind,
                               namespace=namespace,
                               _app=app,
                               keys_only=True)
    ds_query.Order("__scatter__")
    random_keys = ds_query.Get(shard_count * cls._OVERSAMPLING_FACTOR)

    if not random_keys:
      # There are no entities with scatter property. We have no idea
      # how to split.
      return ([key_range.KeyRange(namespace=namespace, _app=app)] +
              [None] * (shard_count - 1))

    random_keys.sort()

    if len(random_keys) >= shard_count:
      # We've got a lot of scatter values. Sample them down.
      random_keys = cls._choose_split_points(random_keys, shard_count)

    # pylint: disable=redefined-outer-name
    key_ranges = []

    # Open-ended range before the first split point.
    key_ranges.append(key_range.KeyRange(
        key_start=None,
        key_end=random_keys[0],
        direction=key_range.KeyRange.ASC,
        include_start=False,
        include_end=False,
        namespace=namespace,
        _app=app))

    # Half-open [split_i, split_i+1) ranges between consecutive split points.
    for i in range(0, len(random_keys) - 1):
      key_ranges.append(key_range.KeyRange(
          key_start=random_keys[i],
          key_end=random_keys[i+1],
          direction=key_range.KeyRange.ASC,
          include_start=True,
          include_end=False,
          namespace=namespace,
          _app=app))

    # Open-ended range after the last split point.
    key_ranges.append(key_range.KeyRange(
        key_start=random_keys[-1],
        key_end=None,
        direction=key_range.KeyRange.ASC,
        include_start=True,
        include_end=False,
        namespace=namespace,
        _app=app))

    if len(key_ranges) < shard_count:
      # We need to have as many shards as it was requested. Add some Nones.
      key_ranges += [None] * (shard_count - len(key_ranges))

    return key_ranges

  @classmethod
  def _split_input_from_params(cls, app, namespaces, entity_kind_name,
                               params, shard_count):
    """Return input reader objects. Helper for split_input."""
    # pylint: disable=redefined-outer-name
    key_ranges = []  # KeyRanges for all namespaces
    for namespace in namespaces:
      key_ranges.extend(
          cls._split_input_from_namespace(app,
                                          namespace,
                                          entity_kind_name,
                                          shard_count))

    # Divide the KeyRanges into shard_count shards. The KeyRanges for different
    # namespaces might be very different in size so the assignment of KeyRanges
    # to shards is done round-robin.
    shared_ranges = [[] for _ in range(shard_count)]
    for i, k_range in enumerate(key_ranges):
      shared_ranges[i % shard_count].append(k_range)
    batch_size = int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE))

    return [cls(entity_kind_name,
                key_ranges=key_ranges,
                ns_range=None,
                batch_size=batch_size)
            for key_ranges in shared_ranges if key_ranges]

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Args:
      mapper_spec: The MapperSpec for this InputReader.
Raises: BadReaderParamsError: required parameters are missing or invalid. """ if mapper_spec.input_reader_class() != cls: raise BadReaderParamsError("Input reader class mismatch") params = _get_params(mapper_spec) if cls.ENTITY_KIND_PARAM not in params: raise BadReaderParamsError("Missing mapper parameter 'entity_kind'") if cls.BATCH_SIZE_PARAM in params: try: batch_size = int(params[cls.BATCH_SIZE_PARAM]) if batch_size < 1: raise BadReaderParamsError("Bad batch size: %s" % batch_size) except ValueError, e: raise BadReaderParamsError("Bad batch size: %s" % e) if cls.NAMESPACE_PARAM in params: if not isinstance(params[cls.NAMESPACE_PARAM], (str, unicode, type(None))): raise BadReaderParamsError( "Expected a single namespace string") if cls.NAMESPACES_PARAM in params: raise BadReaderParamsError("Multiple namespaces are no longer supported") if cls.FILTERS_PARAM in params: filters = params[cls.FILTERS_PARAM] if not isinstance(filters, list): raise BadReaderParamsError("Expected list for filters parameter") for f in filters: if not isinstance(f, (tuple, list)): raise BadReaderParamsError("Filter should be a tuple or list: %s", f) if len(f) != 3: raise BadReaderParamsError("Filter should be a 3-tuple: %s", f) if not isinstance(f[0], basestring): raise BadReaderParamsError("First element should be string: %s", f) if f[1] != "=": raise BadReaderParamsError( "Only equality filters are supported: %s", f) @classmethod def split_input(cls, mapper_spec): """Splits query into shards without fetching query results. Tries as best as it can to split the whole query result set into equal shards. Due to difficulty of making the perfect split, resulting shards' sizes might differ significantly from each other. Args: mapper_spec: MapperSpec with params containing 'entity_kind'. May have 'namespace' in the params as a string containing a single namespace. If specified then the input reader will only yield values in the given namespace. 
If 'namespace' is not given then values from all namespaces will be yielded. May also have 'batch_size' in the params to specify the number of entities to process in each batch. Returns: A list of InputReader objects. If the query results are empty then the empty list will be returned. Otherwise, the list will always have a length equal to number_of_shards but may be padded with Nones if there are too few results for effective sharding. """ params = _get_params(mapper_spec) entity_kind_name = params[cls.ENTITY_KIND_PARAM] batch_size = int(params.get(cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE)) shard_count = mapper_spec.shard_count namespace = params.get(cls.NAMESPACE_PARAM) app = params.get(cls._APP_PARAM) filters = params.get(cls.FILTERS_PARAM) if namespace is None: # It is difficult to efficiently shard large numbers of namespaces because # there can be an arbitrary number of them. So the strategy is: # 1. if there are a small number of namespaces in the datastore then # generate one KeyRange per namespace per shard and assign each shard a # KeyRange for every namespace. This should lead to nearly perfect # sharding. # 2. if there are a large number of namespaces in the datastore then # generate one NamespaceRange per worker. This can lead to very bad # sharding because namespaces can contain very different numbers of # entities and each NamespaceRange may contain very different numbers # of namespaces. 
namespace_query = datastore.Query("__namespace__", keys_only=True, _app=app) namespace_keys = namespace_query.Get( limit=cls.MAX_NAMESPACES_FOR_KEY_SHARD+1) if len(namespace_keys) > cls.MAX_NAMESPACES_FOR_KEY_SHARD: ns_ranges = namespace_range.NamespaceRange.split(n=shard_count, contiguous=True, _app=app) return [cls(entity_kind_name, key_ranges=None, ns_range=ns_range, batch_size=batch_size, filters=filters) for ns_range in ns_ranges] elif not namespace_keys: return [cls(entity_kind_name, key_ranges=None, ns_range=namespace_range.NamespaceRange(_app=app), batch_size=shard_count, filters=filters)] else: namespaces = [namespace_key.name() or "" for namespace_key in namespace_keys] else: namespaces = [namespace] readers = cls._split_input_from_params( app, namespaces, entity_kind_name, params, shard_count) if filters: for reader in readers: reader._filters = filters return readers def to_json(self): """Serializes all the data in this query range into json form. Returns: all the data in json-compatible map. """ if self._key_ranges is None: key_ranges_json = None else: key_ranges_json = [] for k in self._key_ranges: if k: key_ranges_json.append(k.to_json()) else: key_ranges_json.append(None) if self._ns_range is None: namespace_range_json = None else: namespace_range_json = self._ns_range.to_json_object() if self._current_key_range is None: current_key_range_json = None else: current_key_range_json = self._current_key_range.to_json() json_dict = {self.KEY_RANGE_PARAM: key_ranges_json, self.NAMESPACE_RANGE_PARAM: namespace_range_json, self.CURRENT_KEY_RANGE_PARAM: current_key_range_json, self.ENTITY_KIND_PARAM: self._entity_kind, self.BATCH_SIZE_PARAM: self._batch_size, self.FILTERS_PARAM: self._filters} return json_dict @classmethod def from_json(cls, json): """Create new DatastoreInputReader from the json, encoded by to_json. Args: json: json map representation of DatastoreInputReader. Returns: an instance of DatastoreInputReader with all data deserialized from json. 
""" if json[cls.KEY_RANGE_PARAM] is None: # pylint: disable=redefined-outer-name key_ranges = None else: key_ranges = [] for k in json[cls.KEY_RANGE_PARAM]: if k: key_ranges.append(key_range.KeyRange.from_json(k)) else: key_ranges.append(None) if json[cls.NAMESPACE_RANGE_PARAM] is None: ns_range = None else: ns_range = namespace_range.NamespaceRange.from_json_object( json[cls.NAMESPACE_RANGE_PARAM]) if json[cls.CURRENT_KEY_RANGE_PARAM] is None: current_key_range = None else: current_key_range = key_range.KeyRange.from_json( json[cls.CURRENT_KEY_RANGE_PARAM]) return cls( json[cls.ENTITY_KIND_PARAM], key_ranges, ns_range, json[cls.BATCH_SIZE_PARAM], current_key_range, filters=json.get(cls.FILTERS_PARAM)) class BlobstoreLineInputReader(InputReader): """Input reader for a newline delimited blob in Blobstore.""" # TODO(user): Should we set this based on MAX_BLOB_FETCH_SIZE? _BLOB_BUFFER_SIZE = 64000 # Maximum number of shards to allow. _MAX_SHARD_COUNT = 256 # Maximum number of blobs to allow. _MAX_BLOB_KEYS_COUNT = 246 # Mapreduce parameters. BLOB_KEYS_PARAM = "blob_keys" # Serialization parmaeters. INITIAL_POSITION_PARAM = "initial_position" END_POSITION_PARAM = "end_position" BLOB_KEY_PARAM = "blob_key" def __init__(self, blob_key, start_position, end_position): """Initializes this instance with the given blob key and character range. This BlobstoreInputReader will read from the first record starting after strictly after start_position until the first record ending at or after end_position (exclusive). As an exception, if start_position is 0, then this InputReader starts reading at the first record. Args: blob_key: the BlobKey that this input reader is processing. start_position: the position to start reading at. end_position: a position in the last record to read. 
""" self._blob_key = blob_key self._blob_reader = blobstore.BlobReader(blob_key, self._BLOB_BUFFER_SIZE, start_position) self._end_position = end_position self._has_iterated = False self._read_before_start = bool(start_position) def next(self): """Returns the next input from as an (offset, line) tuple.""" self._has_iterated = True if self._read_before_start: self._blob_reader.readline() self._read_before_start = False start_position = self._blob_reader.tell() if start_position > self._end_position: raise StopIteration() line = self._blob_reader.readline() if not line: raise StopIteration() return start_position, line.rstrip("\n") def to_json(self): """Returns an json-compatible input shard spec for remaining inputs.""" new_pos = self._blob_reader.tell() if self._has_iterated: new_pos -= 1 return {self.BLOB_KEY_PARAM: self._blob_key, self.INITIAL_POSITION_PARAM: new_pos, self.END_POSITION_PARAM: self._end_position} def __str__(self): """Returns the string representation of this BlobstoreLineInputReader.""" return "blobstore.BlobKey(%r):[%d, %d]" % ( self._blob_key, self._blob_reader.tell(), self._end_position) @classmethod def from_json(cls, json): """Instantiates an instance of this InputReader for the given shard spec.""" return cls(json[cls.BLOB_KEY_PARAM], json[cls.INITIAL_POSITION_PARAM], json[cls.END_POSITION_PARAM]) @classmethod def validate(cls, mapper_spec): """Validates mapper spec and all mapper parameters. Args: mapper_spec: The MapperSpec for this InputReader. Raises: BadReaderParamsError: required parameters are missing or invalid. 
""" if mapper_spec.input_reader_class() != cls: raise BadReaderParamsError("Mapper input reader class mismatch") params = _get_params(mapper_spec) if cls.BLOB_KEYS_PARAM not in params: raise BadReaderParamsError("Must specify 'blob_keys' for mapper input") blob_keys = params[cls.BLOB_KEYS_PARAM] if isinstance(blob_keys, basestring): # This is a mechanism to allow multiple blob keys (which do not contain # commas) in a single string. It may go away. blob_keys = blob_keys.split(",") if len(blob_keys) > cls._MAX_BLOB_KEYS_COUNT: raise BadReaderParamsError("Too many 'blob_keys' for mapper input") if not blob_keys: raise BadReaderParamsError("No 'blob_keys' specified for mapper input") for blob_key in blob_keys: blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key)) if not blob_info: raise BadReaderParamsError("Could not find blobinfo for key %s" % blob_key) @classmethod def split_input(cls, mapper_spec): """Returns a list of shard_count input_spec_shards for input_spec. Args: mapper_spec: The mapper specification to split from. Must contain 'blob_keys' parameter with one or more blob keys. Returns: A list of BlobstoreInputReaders corresponding to the specified shards. """ params = _get_params(mapper_spec) blob_keys = params[cls.BLOB_KEYS_PARAM] if isinstance(blob_keys, basestring): # This is a mechanism to allow multiple blob keys (which do not contain # commas) in a single string. It may go away. 
blob_keys = blob_keys.split(",") blob_sizes = {} for blob_key in blob_keys: blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key)) blob_sizes[blob_key] = blob_info.size shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count) shards_per_blob = shard_count // len(blob_keys) if shards_per_blob == 0: shards_per_blob = 1 chunks = [] for blob_key, blob_size in blob_sizes.items(): blob_chunk_size = blob_size // shards_per_blob for i in xrange(shards_per_blob - 1): chunks.append(BlobstoreLineInputReader.from_json( {cls.BLOB_KEY_PARAM: blob_key, cls.INITIAL_POSITION_PARAM: blob_chunk_size * i, cls.END_POSITION_PARAM: blob_chunk_size * (i + 1)})) chunks.append(BlobstoreLineInputReader.from_json( {cls.BLOB_KEY_PARAM: blob_key, cls.INITIAL_POSITION_PARAM: blob_chunk_size * (shards_per_blob - 1), cls.END_POSITION_PARAM: blob_size})) return chunks class BlobstoreZipInputReader(InputReader): """Input reader for files from a zip archive stored in the Blobstore. Each instance of the reader will read the TOC, from the end of the zip file, and then only the contained files which it is responsible for. """ # Maximum number of shards to allow. _MAX_SHARD_COUNT = 256 # Mapreduce parameters. BLOB_KEY_PARAM = "blob_key" START_INDEX_PARAM = "start_index" END_INDEX_PARAM = "end_index" def __init__(self, blob_key, start_index, end_index, _reader=blobstore.BlobReader): """Initializes this instance with the given blob key and file range. This BlobstoreZipInputReader will read from the file with index start_index up to but not including the file with index end_index. Args: blob_key: the BlobKey that this input reader is processing. start_index: the index of the first file to read. end_index: the index of the first file that will not be read. _reader: a callable that returns a file-like object for reading blobs. Used for dependency injection. 
""" self._blob_key = blob_key self._start_index = start_index self._end_index = end_index self._reader = _reader self._zip = None self._entries = None def next(self): """Returns the next input from this input reader as (ZipInfo, opener) tuple. Returns: The next input from this input reader, in the form of a 2-tuple. The first element of the tuple is a zipfile.ZipInfo object. The second element of the tuple is a zero-argument function that, when called, returns the complete body of the file. """ if not self._zip: self._zip = zipfile.ZipFile(self._reader(self._blob_key)) # Get a list of entries, reversed so we can pop entries off in order self._entries = self._zip.infolist()[self._start_index:self._end_index] self._entries.reverse() if not self._entries: raise StopIteration() entry = self._entries.pop() self._start_index += 1 return (entry, lambda: self._read(entry)) def _read(self, entry): """Read entry content. Args: entry: zip file entry as zipfile.ZipInfo. Returns: Entry content as string. """ start_time = time.time() content = self._zip.read(entry.filename) ctx = context.get() if ctx: operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx) operation.counters.Increment( COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx) return content @classmethod def from_json(cls, json): """Creates an instance of the InputReader for the given input shard state. Args: json: The InputReader state as a dict-like object. Returns: An instance of the InputReader configured using the values of json. """ return cls(json[cls.BLOB_KEY_PARAM], json[cls.START_INDEX_PARAM], json[cls.END_INDEX_PARAM]) def to_json(self): """Returns an input shard state for the remaining inputs. Returns: A json-izable version of the remaining InputReader. 
""" return {self.BLOB_KEY_PARAM: self._blob_key, self.START_INDEX_PARAM: self._start_index, self.END_INDEX_PARAM: self._end_index} def __str__(self): """Returns the string representation of this BlobstoreZipInputReader.""" return "blobstore.BlobKey(%r):[%d, %d]" % ( self._blob_key, self._start_index, self._end_index) @classmethod def validate(cls, mapper_spec): """Validates mapper spec and all mapper parameters. Args: mapper_spec: The MapperSpec for this InputReader. Raises: BadReaderParamsError: required parameters are missing or invalid. """ if mapper_spec.input_reader_class() != cls: raise BadReaderParamsError("Mapper input reader class mismatch") params = _get_params(mapper_spec) if cls.BLOB_KEY_PARAM not in params: raise BadReaderParamsError("Must specify 'blob_key' for mapper input") blob_key = params[cls.BLOB_KEY_PARAM] blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key)) if not blob_info: raise BadReaderParamsError("Could not find blobinfo for key %s" % blob_key) @classmethod def split_input(cls, mapper_spec, _reader=blobstore.BlobReader): """Returns a list of input shard states for the input spec. Args: mapper_spec: The MapperSpec for this InputReader. Must contain 'blob_key' parameter with one blob key. _reader: a callable that returns a file-like object for reading blobs. Used for dependency injection. Returns: A list of InputReaders spanning files within the zip. """ params = _get_params(mapper_spec) blob_key = params[cls.BLOB_KEY_PARAM] zip_input = zipfile.ZipFile(_reader(blob_key)) zfiles = zip_input.infolist() total_size = sum(x.file_size for x in zfiles) num_shards = min(mapper_spec.shard_count, cls._MAX_SHARD_COUNT) size_per_shard = total_size // num_shards # Break the list of files into sublists, each of approximately # size_per_shard bytes. 
shard_start_indexes = [0] current_shard_size = 0 for i, fileinfo in enumerate(zfiles): current_shard_size += fileinfo.file_size if current_shard_size >= size_per_shard: shard_start_indexes.append(i + 1) current_shard_size = 0 if shard_start_indexes[-1] != len(zfiles): shard_start_indexes.append(len(zfiles)) return [cls(blob_key, start_index, end_index, _reader) for start_index, end_index in zip(shard_start_indexes, shard_start_indexes[1:])] class BlobstoreZipLineInputReader(InputReader): """Input reader for newline delimited files in zip archives from Blobstore. This has the same external interface as the BlobstoreLineInputReader, in that it takes a list of blobs as its input and yields lines to the reader. However the blobs themselves are expected to be zip archives of line delimited files instead of the files themselves. This is useful as many line delimited files gain greatly from compression. """ # Maximum number of shards to allow. _MAX_SHARD_COUNT = 256 # Maximum number of blobs to allow. _MAX_BLOB_KEYS_COUNT = 246 # Mapreduce parameters. BLOB_KEYS_PARAM = "blob_keys" # Serialization parameters. BLOB_KEY_PARAM = "blob_key" START_FILE_INDEX_PARAM = "start_file_index" END_FILE_INDEX_PARAM = "end_file_index" OFFSET_PARAM = "offset" def __init__(self, blob_key, start_file_index, end_file_index, offset, _reader=blobstore.BlobReader): """Initializes this instance with the given blob key and file range. This BlobstoreZipLineInputReader will read from the file with index start_file_index up to but not including the file with index end_file_index. It will return lines starting at offset within file[start_file_index] Args: blob_key: the BlobKey that this input reader is processing. start_file_index: the index of the first file to read within the zip. end_file_index: the index of the first file that will not be read. offset: the byte offset within blob_key.zip[start_file_index] to start reading. The reader will continue to the end of the file. 
      _reader: a callable that returns a file-like object for reading blobs.
          Used for dependency injection.
    """
    self._blob_key = blob_key
    self._start_file_index = start_file_index
    self._end_file_index = end_file_index
    self._initial_offset = offset
    self._reader = _reader
    # Zip handle, remaining entries and the current file's stream are created
    # lazily on first next().
    self._zip = None
    self._entries = None
    self._filestream = None

  @classmethod
  def validate(cls, mapper_spec):
    """Validates mapper spec and all mapper parameters.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Mapper input reader class mismatch")
    params = _get_params(mapper_spec)
    if cls.BLOB_KEYS_PARAM not in params:
      raise BadReaderParamsError("Must specify 'blob_keys' for mapper input")
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
      # This is a mechanism to allow multiple blob keys (which do not contain
      # commas) in a single string. It may go away.
      blob_keys = blob_keys.split(",")
    if len(blob_keys) > cls._MAX_BLOB_KEYS_COUNT:
      raise BadReaderParamsError("Too many 'blob_keys' for mapper input")
    if not blob_keys:
      raise BadReaderParamsError("No 'blob_keys' specified for mapper input")
    for blob_key in blob_keys:
      blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
      if not blob_info:
        raise BadReaderParamsError("Could not find blobinfo for key %s" %
                                   blob_key)

  @classmethod
  def split_input(cls, mapper_spec, _reader=blobstore.BlobReader):
    """Returns a list of input readers for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader. Must contain
          'blob_keys' parameter with one or more blob keys.
      _reader: a callable that returns a file-like object for reading blobs.
          Used for dependency injection.

    Returns:
      A list of InputReaders spanning the subfiles within the blobs.
      There will be at least one reader per blob, but it will otherwise
      attempt to keep the expanded size even.
    """
    params = _get_params(mapper_spec)
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
      # This is a mechanism to allow multiple blob keys (which do not contain
      # commas) in a single string. It may go away.
      blob_keys = blob_keys.split(",")

    blob_files = {}
    total_size = 0
    for blob_key in blob_keys:
      zip_input = zipfile.ZipFile(_reader(blob_key))
      blob_files[blob_key] = zip_input.infolist()
      total_size += sum(x.file_size for x in blob_files[blob_key])

    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)

    # We can break on both blob key and file-within-zip boundaries.
    # A shard will span at minimum a single blob key, but may only
    # handle a few files within a blob.

    size_per_shard = total_size // shard_count

    readers = []
    for blob_key in blob_keys:
      bfiles = blob_files[blob_key]
      current_shard_size = 0
      start_file_index = 0
      next_file_index = 0
      for fileinfo in bfiles:
        next_file_index += 1
        current_shard_size += fileinfo.file_size
        if current_shard_size >= size_per_shard:
          readers.append(cls(blob_key, start_file_index, next_file_index, 0,
                             _reader))
          current_shard_size = 0
          start_file_index = next_file_index
      # Flush any trailing files that did not fill a whole shard.
      if current_shard_size != 0:
        readers.append(cls(blob_key, start_file_index, next_file_index, 0,
                           _reader))

    return readers

  def next(self):
    """Returns the next line from this input reader as (lineinfo, line) tuple.

    Returns:
      The next input from this input reader, in the form of a 2-tuple.
      The first element of the tuple describes the source, it is itself
        a tuple (blobkey, filenumber, byteoffset).
      The second element of the tuple is the line found at that offset.
""" if not self._filestream: if not self._zip: self._zip = zipfile.ZipFile(self._reader(self._blob_key)) # Get a list of entries, reversed so we can pop entries off in order self._entries = self._zip.infolist()[self._start_file_index: self._end_file_index] self._entries.reverse() if not self._entries: raise StopIteration() entry = self._entries.pop() value = self._zip.read(entry.filename) self._filestream = StringIO.StringIO(value) if self._initial_offset: self._filestream.seek(self._initial_offset) self._filestream.readline() start_position = self._filestream.tell() line = self._filestream.readline() if not line: # Done with this file in the zip. Move on to the next file. self._filestream.close() self._filestream = None self._start_file_index += 1 self._initial_offset = 0 return self.next() return ((self._blob_key, self._start_file_index, start_position), line.rstrip("\n")) def _next_offset(self): """Return the offset of the next line to read.""" if self._filestream: offset = self._filestream.tell() if offset: offset -= 1 else: offset = self._initial_offset return offset def to_json(self): """Returns an input shard state for the remaining inputs. Returns: A json-izable version of the remaining InputReader. """ return {self.BLOB_KEY_PARAM: self._blob_key, self.START_FILE_INDEX_PARAM: self._start_file_index, self.END_FILE_INDEX_PARAM: self._end_file_index, self.OFFSET_PARAM: self._next_offset()} @classmethod def from_json(cls, json, _reader=blobstore.BlobReader): """Creates an instance of the InputReader for the given input shard state. Args: json: The InputReader state as a dict-like object. _reader: For dependency injection. Returns: An instance of the InputReader configured using the values of json. """ return cls(json[cls.BLOB_KEY_PARAM], json[cls.START_FILE_INDEX_PARAM], json[cls.END_FILE_INDEX_PARAM], json[cls.OFFSET_PARAM], _reader) def __str__(self): """Returns the string representation of this reader. 
Returns: string blobkey:[start file num, end file num]:current offset. """ return "blobstore.BlobKey(%r):[%d, %d]:%d" % ( self._blob_key, self._start_file_index, self._end_file_index, self._next_offset()) class RandomStringInputReader(InputReader): """RandomStringInputReader generates random strings as output. Primary usage is to populate output with testing entries. """ # Total number of entries this reader should generate. COUNT = "count" # Length of the generated strings. STRING_LENGTH = "string_length" DEFAULT_STRING_LENGTH = 10 def __init__(self, count, string_length): """Initialize input reader. Args: count: number of entries this shard should generate. string_length: the length of generated random strings. """ self._count = count self._string_length = string_length def __iter__(self): ctx = context.get() while self._count: self._count -= 1 start_time = time.time() content = "".join(random.choice(string.ascii_lowercase) for _ in range(self._string_length)) if ctx: operation.counters.Increment( COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx) operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx) yield content @classmethod def split_input(cls, mapper_spec): params = _get_params(mapper_spec) count = params[cls.COUNT] string_length = cls.DEFAULT_STRING_LENGTH if cls.STRING_LENGTH in params: string_length = params[cls.STRING_LENGTH] shard_count = mapper_spec.shard_count count_per_shard = count // shard_count mr_input_readers = [ cls(count_per_shard, string_length) for _ in range(shard_count)] left = count - count_per_shard*shard_count if left > 0: mr_input_readers.append(cls(left, string_length)) return mr_input_readers @classmethod def validate(cls, mapper_spec): if mapper_spec.input_reader_class() != cls: raise BadReaderParamsError("Mapper input reader class mismatch") params = _get_params(mapper_spec) if cls.COUNT not in params: raise BadReaderParamsError("Must specify %s" % cls.COUNT) if not isinstance(params[cls.COUNT], 
int): raise BadReaderParamsError("%s should be an int but is %s" % (cls.COUNT, type(params[cls.COUNT]))) if params[cls.COUNT] <= 0: raise BadReaderParamsError("%s should be a positive int") if cls.STRING_LENGTH in params and not ( isinstance(params[cls.STRING_LENGTH], int) and params[cls.STRING_LENGTH] > 0): raise BadReaderParamsError("%s should be a positive int but is %s" % (cls.STRING_LENGTH, params[cls.STRING_LENGTH])) if (not isinstance(mapper_spec.shard_count, int) or mapper_spec.shard_count <= 0): raise BadReaderParamsError( "shard_count should be a positive int but is %s" % mapper_spec.shard_count) @classmethod def from_json(cls, json): return cls(json[cls.COUNT], json[cls.STRING_LENGTH]) def to_json(self): return {self.COUNT: self._count, self.STRING_LENGTH: self._string_length} # TODO(user): This reader always produces only one shard, because # namespace entities use the mix of ids/names, and KeyRange-based splitting # doesn't work satisfactory in this case. # It's possible to implement specific splitting functionality for the reader # instead of reusing generic one. Meanwhile 1 shard is enough for our # applications. class NamespaceInputReader(InputReader): """An input reader to iterate over namespaces. This reader yields namespace names as string. It will always produce only one shard. """ NAMESPACE_RANGE_PARAM = "namespace_range" BATCH_SIZE_PARAM = "batch_size" _BATCH_SIZE = 10 def __init__(self, ns_range, batch_size=_BATCH_SIZE): self.ns_range = ns_range self._batch_size = batch_size def to_json(self): """Serializes all the data in this query range into json form. Returns: all the data in json-compatible map. """ return {self.NAMESPACE_RANGE_PARAM: self.ns_range.to_json_object(), self.BATCH_SIZE_PARAM: self._batch_size} @classmethod def from_json(cls, json): """Create new DatastoreInputReader from the json, encoded by to_json. Args: json: json map representation of DatastoreInputReader. 
Returns: an instance of DatastoreInputReader with all data deserialized from json. """ return cls( namespace_range.NamespaceRange.from_json_object( json[cls.NAMESPACE_RANGE_PARAM]), json[cls.BATCH_SIZE_PARAM]) @classmethod def validate(cls, mapper_spec): """Validates mapper spec. Args: mapper_spec: The MapperSpec for this InputReader. Raises: BadReaderParamsError: required parameters are missing or invalid. """ if mapper_spec.input_reader_class() != cls: raise BadReaderParamsError("Input reader class mismatch") params = _get_params(mapper_spec) if cls.BATCH_SIZE_PARAM in params: try: batch_size = int(params[cls.BATCH_SIZE_PARAM]) if batch_size < 1: raise BadReaderParamsError("Bad batch size: %s" % batch_size) except ValueError, e: raise BadReaderParamsError("Bad batch size: %s" % e) @classmethod def split_input(cls, mapper_spec): """Returns a list of input readers for the input spec. Args: mapper_spec: The MapperSpec for this InputReader. Returns: A list of InputReaders. """ batch_size = int(_get_params(mapper_spec).get( cls.BATCH_SIZE_PARAM, cls._BATCH_SIZE)) shard_count = mapper_spec.shard_count namespace_ranges = namespace_range.NamespaceRange.split(shard_count, contiguous=True) return [NamespaceInputReader(ns_range, batch_size) for ns_range in namespace_ranges] def __iter__(self): while True: keys = self.ns_range.make_datastore_query().Get(limit=self._batch_size) if not keys: break for key in keys: namespace = metadata.Namespace.key_to_namespace(key) self.ns_range = self.ns_range.with_start_after(namespace) yield namespace def __str__(self): return repr(self.ns_range) class RecordsReader(InputReader): """Reader to read a list of Files API file in records format. The number of input shards can be specified by the SHARDS_PARAM mapper parameter. Input files cannot be split, so there will be at most one shard per file. Also the number of shards will not be reduced based on the number of input files, so shards in always equals shards out. 
""" FILE_PARAM = "file" FILES_PARAM = "files" def __init__(self, filenames, position): """Constructor. Args: filenames: list of filenames. position: file position to start reading from as int. """ self._filenames = filenames if self._filenames: self._reader = records.RecordsReader( files.BufferedFile(self._filenames[0])) self._reader.seek(position) else: self._reader = None def __iter__(self): """Iterate over records in file. Yields: Records as strings. """ ctx = context.get() while self._reader: try: start_time = time.time() record = self._reader.read() if ctx: operation.counters.Increment( COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx) operation.counters.Increment(COUNTER_IO_READ_BYTES, len(record))(ctx) yield record except (files.ExistenceError), e: raise errors.FailJobError("ExistenceError: %s" % e) except (files.UnknownError), e: raise errors.RetrySliceError("UnknownError: %s" % e) except EOFError: self._filenames.pop(0) if not self._filenames: self._reader = None else: self._reader = records.RecordsReader( files.BufferedFile(self._filenames[0])) @classmethod def from_json(cls, json): """Creates an instance of the InputReader for the given input shard state. Args: json: The InputReader state as a dict-like object. Returns: An instance of the InputReader configured using the values of json. """ return cls(json["filenames"], json["position"]) def to_json(self): """Returns an input shard state for the remaining inputs. Returns: A json-izable version of the remaining InputReader. """ result = { "filenames": self._filenames, "position": 0, } if self._reader: result["position"] = self._reader.tell() return result @classmethod def split_input(cls, mapper_spec): """Returns a list of input readers for the input spec. Args: mapper_spec: The MapperSpec for this InputReader. Returns: A list of InputReaders. 
""" params = _get_params(mapper_spec) shard_count = mapper_spec.shard_count if cls.FILES_PARAM in params: filenames = params[cls.FILES_PARAM] if isinstance(filenames, basestring): filenames = filenames.split(",") else: filenames = [params[cls.FILE_PARAM]] batch_list = [[] for _ in xrange(shard_count)] for index, _ in enumerate(filenames): # Simplest round robin so we don't have any short shards. batch_list[index % shard_count].append(filenames[index]) # Sort from most shards to least shards so the short shard is last. batch_list.sort(reverse=True, key=len) return [cls(batch, 0) for batch in batch_list] @classmethod def validate(cls, mapper_spec): """Validates mapper spec and all mapper parameters. Args: mapper_spec: The MapperSpec for this InputReader. Raises: BadReaderParamsError: required parameters are missing or invalid. """ if mapper_spec.input_reader_class() != cls: raise errors.BadReaderParamsError("Input reader class mismatch") params = _get_params(mapper_spec) if (cls.FILES_PARAM not in params and cls.FILE_PARAM not in params): raise BadReaderParamsError( "Must specify '%s' or '%s' parameter for mapper input" % (cls.FILES_PARAM, cls.FILE_PARAM)) def __str__(self): position = 0 if self._reader: position = self._reader.tell() return "%s:%s" % (self._filenames, position) class LogInputReader(InputReader): """Input reader for a time range of logs via the Logs Reader API. The number of input shards may be specified by the SHARDS_PARAM mapper parameter. A starting and ending time (in seconds since the Unix epoch) are required to generate time ranges over which to shard the input. """ # Parameters directly mapping to those available via logservice.fetch(). 
START_TIME_PARAM = "start_time" END_TIME_PARAM = "end_time" MINIMUM_LOG_LEVEL_PARAM = "minimum_log_level" INCLUDE_INCOMPLETE_PARAM = "include_incomplete" INCLUDE_APP_LOGS_PARAM = "include_app_logs" VERSION_IDS_PARAM = "version_ids" MODULE_VERSIONS_PARAM = "module_versions" # Semi-hidden parameters used only internally or for privileged applications. _OFFSET_PARAM = "offset" _PROTOTYPE_REQUEST_PARAM = "prototype_request" _PARAMS = frozenset([START_TIME_PARAM, END_TIME_PARAM, _OFFSET_PARAM, MINIMUM_LOG_LEVEL_PARAM, INCLUDE_INCOMPLETE_PARAM, INCLUDE_APP_LOGS_PARAM, VERSION_IDS_PARAM, MODULE_VERSIONS_PARAM, _PROTOTYPE_REQUEST_PARAM]) _KWARGS = frozenset([_OFFSET_PARAM, _PROTOTYPE_REQUEST_PARAM]) def __init__(self, start_time=None, end_time=None, minimum_log_level=None, include_incomplete=False, include_app_logs=False, version_ids=None, module_versions=None, **kwargs): """Constructor. Args: start_time: The earliest request completion or last-update time of logs that should be mapped over, in seconds since the Unix epoch. end_time: The latest request completion or last-update time that logs should be mapped over, in seconds since the Unix epoch. minimum_log_level: An application log level which serves as a filter on the requests mapped over--requests with no application log at or above the specified level will be omitted, even if include_app_logs is False. include_incomplete: Whether or not to include requests that have started but not yet finished, as a boolean. Defaults to False. include_app_logs: Whether or not to include application level logs in the mapped logs, as a boolean. Defaults to False. version_ids: A list of version ids whose logs should be read. This can not be used with module_versions module_versions: A list of tuples containing a module and version id whose logs should be read. This can not be used with version_ids **kwargs: A dictionary of keywords associated with this input reader. 
""" InputReader.__init__(self) # pylint: disable=non-parent-init-called # The rule for __params is that its contents will always be suitable as # input to logservice.fetch(). self.__params = dict(kwargs) if start_time is not None: self.__params[self.START_TIME_PARAM] = start_time if end_time is not None: self.__params[self.END_TIME_PARAM] = end_time if minimum_log_level is not None: self.__params[self.MINIMUM_LOG_LEVEL_PARAM] = minimum_log_level if include_incomplete is not None: self.__params[self.INCLUDE_INCOMPLETE_PARAM] = include_incomplete if include_app_logs is not None: self.__params[self.INCLUDE_APP_LOGS_PARAM] = include_app_logs if version_ids: self.__params[self.VERSION_IDS_PARAM] = version_ids if module_versions: self.__params[self.MODULE_VERSIONS_PARAM] = module_versions # Any submitted prototype_request will be in encoded form. if self._PROTOTYPE_REQUEST_PARAM in self.__params: prototype_request = log_service_pb.LogReadRequest( self.__params[self._PROTOTYPE_REQUEST_PARAM]) self.__params[self._PROTOTYPE_REQUEST_PARAM] = prototype_request def __iter__(self): """Iterates over logs in a given range of time. Yields: A RequestLog containing all the information for a single request. """ for log in logservice.fetch(**self.__params): self.__params[self._OFFSET_PARAM] = log.offset yield log @classmethod def from_json(cls, json): """Creates an instance of the InputReader for the given input shard's state. Args: json: The InputReader state as a dict-like object. Returns: An instance of the InputReader configured using the given JSON parameters. """ # Strip out unrecognized parameters, as introduced by b/5960884. params = dict((str(k), v) for k, v in json.iteritems() if k in cls._PARAMS) # This is not symmetric with to_json() wrt. PROTOTYPE_REQUEST_PARAM because # the constructor parameters need to be JSON-encodable, so the decoding # needs to happen there anyways. 
if cls._OFFSET_PARAM in params: params[cls._OFFSET_PARAM] = base64.b64decode(params[cls._OFFSET_PARAM]) return cls(**params) def to_json(self): """Returns an input shard state for the remaining inputs. Returns: A JSON serializable version of the remaining input to read. """ params = dict(self.__params) # Shallow copy. if self._PROTOTYPE_REQUEST_PARAM in params: prototype_request = params[self._PROTOTYPE_REQUEST_PARAM] params[self._PROTOTYPE_REQUEST_PARAM] = prototype_request.Encode() if self._OFFSET_PARAM in params: params[self._OFFSET_PARAM] = base64.b64encode(params[self._OFFSET_PARAM]) return params @classmethod def split_input(cls, mapper_spec): """Returns a list of input readers for the given input specification. Args: mapper_spec: The MapperSpec for this InputReader. Returns: A list of InputReaders. """ params = _get_params(mapper_spec) shard_count = mapper_spec.shard_count # Pick out the overall start and end times and time step per shard. start_time = params[cls.START_TIME_PARAM] end_time = params[cls.END_TIME_PARAM] seconds_per_shard = (end_time - start_time) / shard_count # Create a LogInputReader for each shard, modulating the params as we go. shards = [] for _ in xrange(shard_count - 1): params[cls.END_TIME_PARAM] = (params[cls.START_TIME_PARAM] + seconds_per_shard) shards.append(LogInputReader(**params)) params[cls.START_TIME_PARAM] = params[cls.END_TIME_PARAM] # Create a final shard to complete the time range. params[cls.END_TIME_PARAM] = end_time return shards + [LogInputReader(**params)] @classmethod def validate(cls, mapper_spec): """Validates the mapper's specification and all necessary parameters. Args: mapper_spec: The MapperSpec to be used with this InputReader. Raises: BadReaderParamsError: If the user fails to specify both a starting time and an ending time, or if the starting time is later than the ending time. 
""" if mapper_spec.input_reader_class() != cls: raise errors.BadReaderParamsError("Input reader class mismatch") params = _get_params(mapper_spec, allowed_keys=cls._PARAMS) if (cls.VERSION_IDS_PARAM not in params and cls.MODULE_VERSIONS_PARAM not in params): raise errors.BadReaderParamsError("Must specify a list of version ids or " "module/version ids for mapper input") if (cls.VERSION_IDS_PARAM in params and cls.MODULE_VERSIONS_PARAM in params): raise errors.BadReaderParamsError("Can not supply both version ids or " "module/version ids. Use only one.") if (cls.START_TIME_PARAM not in params or params[cls.START_TIME_PARAM] is None): raise errors.BadReaderParamsError("Must specify a starting time for " "mapper input") if cls.END_TIME_PARAM not in params or params[cls.END_TIME_PARAM] is None: params[cls.END_TIME_PARAM] = time.time() if params[cls.START_TIME_PARAM] >= params[cls.END_TIME_PARAM]: raise errors.BadReaderParamsError("The starting time cannot be later " "than or the same as the ending time.") if cls._PROTOTYPE_REQUEST_PARAM in params: try: params[cls._PROTOTYPE_REQUEST_PARAM] = log_service_pb.LogReadRequest( params[cls._PROTOTYPE_REQUEST_PARAM]) except (TypeError, ProtocolBuffer.ProtocolBufferDecodeError): raise errors.BadReaderParamsError("The prototype request must be " "parseable as a LogReadRequest.") # Pass the parameters to logservice.fetch() to verify any underlying # constraints on types or values. This only constructs an iterator, it # doesn't trigger any requests for actual log records. 
try: logservice.fetch(**params) except logservice.InvalidArgumentError, e: raise errors.BadReaderParamsError("One or more parameters are not valid " "inputs to logservice.fetch(): %s" % e) def __str__(self): """Returns the string representation of this LogInputReader.""" params = [] for key in sorted(self.__params.keys()): value = self.__params[key] if key is self._PROTOTYPE_REQUEST_PARAM: params.append("%s='%s'" % (key, value)) elif key is self._OFFSET_PARAM: params.append("%s='%s'" % (key, value)) else: params.append("%s=%s" % (key, value)) return "LogInputReader(%s)" % ", ".join(params) class _GoogleCloudStorageInputReader(InputReader): """Input reader from Google Cloud Storage using the cloudstorage library. This class is expected to be subclassed with a reader that understands user-level records. Required configuration in the mapper_spec.input_reader dictionary. BUCKET_NAME_PARAM: name of the bucket to use (with no extra delimiters or suffixed such as directories. OBJECT_NAMES_PARAM: a list of object names or prefixes. All objects must be in the BUCKET_NAME_PARAM bucket. If the name ends with a * it will be treated as prefix and all objects with matching names will be read. Entries should not start with a slash unless that is part of the object's name. An example list could be: ["my-1st-input-file", "directory/my-2nd-file", "some/other/dir/input-*"] To retrieve all files "*" will match every object in the bucket. If a file is listed twice or is covered by multiple prefixes it will be read twice, there is no deduplication. Optional configuration in the mapper_sec.input_reader dictionary. BUFFER_SIZE_PARAM: the size of the read buffer for each file handle. DELIMITER_PARAM: if specified, turn on the shallow splitting mode. The delimiter is used as a path separator to designate directory hierarchy. Matching of prefixes from OBJECT_NAME_PARAM will stop at the first directory instead of matching all files under the directory. 
This allows MR to process bucket with hundreds of thousands of files. """ # Supported parameters BUCKET_NAME_PARAM = "bucket_name" OBJECT_NAMES_PARAM = "objects" BUFFER_SIZE_PARAM = "buffer_size" DELIMITER_PARAM = "delimiter" # Internal parameters _ACCOUNT_ID_PARAM = "account_id" # Other internal configuration constants _JSON_PICKLE = "pickle" _STRING_MAX_FILES_LISTED = 10 # Max files shown in the str representation # Input reader can also take in start and end filenames and do # listbucket. This saves space but has two cons. # 1. Files to read are less well defined: files can be added or removed over # the lifetime of the MR job. # 2. A shard has to process files from a contiguous namespace. # May introduce staggering shard. def __init__(self, filenames, index=0, buffer_size=None, _account_id=None, delimiter=None): """Initialize a GoogleCloudStorageInputReader instance. Args: filenames: A list of Google Cloud Storage filenames of the form '/bucket/objectname'. index: Index of the next filename to read. buffer_size: The size of the read buffer, None to use default. _account_id: Internal use only. See cloudstorage documentation. delimiter: Delimiter used as path separator. See class doc for details. """ self._filenames = filenames self._index = index self._buffer_size = buffer_size self._account_id = _account_id self._delimiter = delimiter self._bucket = None self._bucket_iter = None def _next_file(self): """Find next filename. self._filenames may need to be expanded via listbucket. Returns: None if no more file is left. Filename otherwise. 
""" while True: if self._bucket_iter: try: return self._bucket_iter.next().filename except StopIteration: self._bucket_iter = None self._bucket = None if self._index >= len(self._filenames): return filename = self._filenames[self._index] self._index += 1 if self._delimiter is None or not filename.endswith(self._delimiter): return filename self._bucket = cloudstorage.listbucket(filename, delimiter=self._delimiter) self._bucket_iter = iter(self._bucket) @classmethod def validate(cls, mapper_spec): """Validate mapper specification. Args: mapper_spec: an instance of model.MapperSpec Raises: BadReaderParamsError: if the specification is invalid for any reason such as missing the bucket name or providing an invalid bucket name. """ reader_spec = _get_params(mapper_spec, allow_old=False) # Bucket Name is required if cls.BUCKET_NAME_PARAM not in reader_spec: raise errors.BadReaderParamsError( "%s is required for Google Cloud Storage" % cls.BUCKET_NAME_PARAM) try: cloudstorage.validate_bucket_name( reader_spec[cls.BUCKET_NAME_PARAM]) except ValueError, error: raise errors.BadReaderParamsError("Bad bucket name, %s" % (error)) # Object Name(s) are required if cls.OBJECT_NAMES_PARAM not in reader_spec: raise errors.BadReaderParamsError( "%s is required for Google Cloud Storage" % cls.OBJECT_NAMES_PARAM) filenames = reader_spec[cls.OBJECT_NAMES_PARAM] if not isinstance(filenames, list): raise errors.BadReaderParamsError( "Object name list is not a list but a %s" % filenames.__class__.__name__) for filename in filenames: if not isinstance(filename, basestring): raise errors.BadReaderParamsError( "Object name is not a string but a %s" % filename.__class__.__name__) if cls.DELIMITER_PARAM in reader_spec: delimiter = reader_spec[cls.DELIMITER_PARAM] if not isinstance(delimiter, basestring): raise errors.BadReaderParamsError( "%s is not a string but a %s" % (cls.DELIMITER_PARAM, type(delimiter))) @classmethod def split_input(cls, mapper_spec): """Returns a list of input readers. 
An equal number of input files are assigned to each shard (+/- 1). If there are fewer files than shards, fewer than the requested number of shards will be used. Input files are currently never split (although for some formats could be and may be split in a future implementation). Args: mapper_spec: an instance of model.MapperSpec. Returns: A list of InputReaders. None when no input data can be found. """ reader_spec = _get_params(mapper_spec, allow_old=False) bucket = reader_spec[cls.BUCKET_NAME_PARAM] filenames = reader_spec[cls.OBJECT_NAMES_PARAM] delimiter = reader_spec.get(cls.DELIMITER_PARAM) account_id = reader_spec.get(cls._ACCOUNT_ID_PARAM) buffer_size = reader_spec.get(cls.BUFFER_SIZE_PARAM) # Gather the complete list of files (expanding wildcards) all_filenames = [] for filename in filenames: if filename.endswith("*"): all_filenames.extend( [file_stat.filename for file_stat in cloudstorage.listbucket( "/" + bucket + "/" + filename[:-1], delimiter=delimiter, _account_id=account_id)]) else: all_filenames.append("/%s/%s" % (bucket, filename)) # Split into shards readers = [] for shard in range(0, mapper_spec.shard_count): shard_filenames = all_filenames[shard::mapper_spec.shard_count] if shard_filenames: readers.append(cls( shard_filenames, buffer_size=buffer_size, _account_id=account_id, delimiter=delimiter)) return readers @classmethod def from_json(cls, state): obj = pickle.loads(state[cls._JSON_PICKLE]) if obj._bucket: obj._bucket_iter = iter(obj._bucket) return obj def to_json(self): before_iter = self._bucket_iter self._bucket_iter = None try: return {self._JSON_PICKLE: pickle.dumps(self)} finally: self._bucket_itr = before_iter def next(self): """Returns the next input from this input reader, a block of bytes. Non existent files will be logged and skipped. The file might have been removed after input splitting. 
Returns: The next input from this input reader in the form of a cloudstorage ReadBuffer that supports a File-like interface (read, readline, seek, tell, and close). An error may be raised if the file can not be opened. Raises: StopIteration: The list of files has been exhausted. """ options = {} if self._buffer_size: options["read_buffer_size"] = self._buffer_size if self._account_id: options["_account_id"] = self._account_id while True: filename = self._next_file() if filename is None: raise StopIteration() try: start_time = time.time() handle = cloudstorage.open(filename, **options) ctx = context.get() if ctx: operation.counters.Increment( COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx) return handle except cloudstorage.NotFoundError: logging.warning("File %s may have been removed. Skipping file.", filename) def __str__(self): # Only show a limited number of files individually for readability num_files = len(self._filenames) if num_files > self._STRING_MAX_FILES_LISTED: names = "%s...%s + %d not shown" % ( ",".join(self._filenames[0:self._STRING_MAX_FILES_LISTED-1]), self._filenames[-1], num_files - self._STRING_MAX_FILES_LISTED) else: names = ",".join(self._filenames) if self._index > num_files: status = "EOF" else: status = "Next %s (%d of %d)" % ( self._filenames[self._index], self._index + 1, # +1 for human 1-indexing num_files) return "CloudStorage [%s, %s]" % (status, names) class _GoogleCloudStorageRecordInputReader(_GoogleCloudStorageInputReader): """Read data from a Google Cloud Storage file using LevelDB format. See the _GoogleCloudStorageOutputWriter for additional configuration options. 
""" def __getstate__(self): result = self.__dict__.copy() # record reader may not exist if reader has not been used if "_record_reader" in result: # RecordsReader has no buffering, it can safely be reconstructed after # deserialization result.pop("_record_reader") return result def next(self): """Returns the next input from this input reader, a record. Returns: The next input from this input reader in the form of a record read from an LevelDB file. Raises: StopIteration: The ordered set records has been exhausted. """ while True: if not hasattr(self, "_cur_handle") or self._cur_handle is None: # If there are no more files, StopIteration is raised here self._cur_handle = super(_GoogleCloudStorageRecordInputReader, self).next() if not hasattr(self, "_record_reader") or self._record_reader is None: self._record_reader = records.RecordsReader(self._cur_handle) try: start_time = time.time() content = self._record_reader.read() ctx = context.get() if ctx: operation.counters.Increment(COUNTER_IO_READ_BYTES, len(content))(ctx) operation.counters.Increment( COUNTER_IO_READ_MSEC, int((time.time() - start_time) * 1000))(ctx) return content except EOFError: self._cur_handle = None self._record_reader = None # TODO(user): Use _GoogleCloudStorageInputReader instead of the File API. 
class _ReducerReader(RecordsReader):
  """Reader to read KeyValues records files from Files API.

  Groups consecutive KeyValues protos that share a key into a single
  (key, values) pair before yielding, optionally funneling the values
  through a user-supplied combiner. Yields ALLOW_CHECKPOINT markers at
  points where it is safe for the framework to checkpoint shard state.
  """

  # Tells the framework to expand each yielded (key, values) tuple into
  # positional handler arguments instead of passing the tuple itself.
  expand_parameters = True

  def __init__(self, filenames, position):
    """Constructor.

    Args:
      filenames: list of input filenames, passed through to RecordsReader.
      position: file position to start reading from, as int.
    """
    super(_ReducerReader, self).__init__(filenames, position)
    # Accumulation state for the key currently being grouped; both are None
    # between keys and are serialized/restored by to_json/from_json.
    self.current_key = None
    self.current_values = None

  def __iter__(self):
    """Iterates over (key, values) pairs reconstructed from KeyValues records.

    Yields:
      (key, values) tuples, interleaved with ALLOW_CHECKPOINT markers at
      safe checkpoint boundaries.
    """
    ctx = context.get()
    combiner = None

    if ctx:
      combiner_spec = ctx.mapreduce_spec.mapper.params.get("combiner_spec")
      if combiner_spec:
        # Resolve the dotted-name combiner handler once, up front.
        combiner = util.handler_for_name(combiner_spec)

    for binary_record in super(_ReducerReader, self).__iter__():
      proto = file_service_pb.KeyValues()
      proto.ParseFromString(binary_record)

      to_yield = None
      # A key change means the previous key's accumulated values are
      # complete and must be emitted (after the combiner step below).
      if self.current_key is not None and self.current_key != proto.key():
        to_yield = (self.current_key, self.current_values)
        self.current_key = None
        self.current_values = None

      if self.current_key is None:
        self.current_key = proto.key()
        self.current_values = []

      if combiner:
        combiner_result = combiner(
            self.current_key, proto.value_list(), self.current_values)

        if not util.is_generator(combiner_result):
          raise errors.BadCombinerOutputError(
              "Combiner %s should yield values instead of returning them (%s)"
              % (combiner, combiner_result))

        self.current_values = []
        for value in combiner_result:
          if isinstance(value, operation.Operation):
            # Operations (e.g. counter increments) are applied immediately,
            # not accumulated as values.
            value(ctx)
          else:
            # With combiner the current values always come from the combiner.
            self.current_values.append(value)

        # Check-point after each combiner call is run only when there's nothing
        # that needs to be yielded below. Otherwise allowing a check-point here
        # would cause the current to_yield data to be lost.
        if not to_yield:
          yield ALLOW_CHECKPOINT
      else:
        # Without combiner we just accumulate values.
        self.current_values.extend(proto.value_list())

      if to_yield:
        yield to_yield
        # Check-point after each key is yielded.
        yield ALLOW_CHECKPOINT

    # There may be some accumulated values left at the end of an input file
    # so be sure to yield those too.
    if self.current_key is not None:
      to_yield = (self.current_key, self.current_values)
      self.current_key = None
      self.current_values = None
      yield to_yield

  @staticmethod
  def encode_data(data):
    """Encodes the given data, which may include raw bytes.

    Works around limitations in JSON encoding, which cannot handle raw bytes.

    Args:
      data: the data to encode.

    Returns:
      The data encoded as a base64 string of its pickle serialization.
    """
    return base64.b64encode(pickle.dumps(data))

  @staticmethod
  def decode_data(data):
    """Decodes data encoded with the encode_data function."""
    return pickle.loads(base64.b64decode(data))

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader, including the
      in-progress (current_key, current_values) accumulation state.
    """
    result = super(_ReducerReader, self).to_json()
    result["current_key"] = self.encode_data(self.current_key)
    result["current_values"] = self.encode_data(self.current_values)
    return result

  @classmethod
  def from_json(cls, json):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      json: The InputReader state as a dict-like object.

    Returns:
      An instance of the InputReader configured using the values of json,
      with the accumulation state restored.
    """
    result = super(_ReducerReader, cls).from_json(json)
    result.current_key = _ReducerReader.decode_data(json["current_key"])
    result.current_values = _ReducerReader.decode_data(json["current_values"])
    return result
<!DOCTYPE html>
<!--
 Licensed to the Apache Software Foundation (ASF) under one or more
  contributor license agreements.  See the NOTICE file distributed with
  this work for additional information regarding copyright ownership.
  The ASF licenses this file to You under the Apache License, Version 2.0
  (the "License"); you may not use this file except in compliance with
  the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-->
<html>
<head>
<meta charset="UTF-8" />
<title>JSP to Servlet Interaction Example</title>
<style type="text/css">
img {
  border: 0;
}
</style>
</head>
<body>
<p><a href="jsptoservlet.jsp"><img src="../images/execute.gif" alt="" /> Execute</a><br />
<a href="../index.html"><img src="../images/return.gif" alt="" /> Return</a></p>
<p style="font-size: 1.4em;"><a href="jsptoservlet.jsp.html">Source Code for JSP calling servlet</a></p>
<p style="font-size: 1.4em;"><a href="ServletToJsp.java.html">Source Code for Servlet calling JSP</a></p>
</body>
</html>
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../../../fixtures/reflection', __FILE__)

# TODO: rewrite
# The reason why having include() and ruby_version_is is to show the specification explicitly.
# You should use have_protected_method() with the exception of this spec.

# Specs for Kernel#protected_methods. Pre-1.9 Ruby returns method names as
# Strings; 1.9+ returns Symbols — hence the two version-gated blocks below,
# which are otherwise parallel.
describe "Kernel#protected_methods" do
  ruby_version_is "" ... "1.9" do
    it "returns a list of the names of protected methods accessible in the object" do
      KernelSpecs::Methods.protected_methods(false).sort.should include("juu_ichi")
      KernelSpecs::Methods.new.protected_methods(false).should include("ku")
    end

    it "returns a list of the names of protected methods accessible in the object and from its ancestors and mixed-in modules" do
      # l1 is the non-inherited list, l2 includes ancestors; their
      # intersection must still contain the directly-defined method.
      l1 = KernelSpecs::Methods.protected_methods(false)
      l2 = KernelSpecs::Methods.protected_methods
      (l1 & l2).should include("juu_ichi")
      KernelSpecs::Methods.new.protected_methods.should include('ku')
    end

    it "returns protected methods mixed in to the metaclass" do
      m = KernelSpecs::Methods.new
      m.extend(KernelSpecs::Methods::MetaclassMethods)
      m.protected_methods.should include('nopeeking')
    end
  end

  ruby_version_is "1.9" do
    it "returns a list of the names of protected methods accessible in the object" do
      KernelSpecs::Methods.protected_methods(false).sort.should include(:juu_ichi)
      KernelSpecs::Methods.new.protected_methods(false).should include(:ku)
    end

    it "returns a list of the names of protected methods accessible in the object and from its ancestors and mixed-in modules" do
      l1 = KernelSpecs::Methods.protected_methods(false)
      l2 = KernelSpecs::Methods.protected_methods
      (l1 & l2).should include(:juu_ichi)
      KernelSpecs::Methods.new.protected_methods.should include(:ku)
    end

    it "returns methods mixed in to the metaclass" do
      m = KernelSpecs::Methods.new
      m.extend(KernelSpecs::Methods::MetaclassMethods)
      m.protected_methods.should include(:nopeeking)
    end
  end
end

# Shared examples checking that protected_methods never reports duplicates
# when a method is defined in several places along the ancestor chain.
# stasy(:pro) maps the expected name to String/Symbol per Ruby version.
describe :kernel_protected_methods_supers, :shared => true do
  it "returns a unique list for an object extended by a module" do
    m = ReflectSpecs.oed.protected_methods(*@object)
    m.select { |x| x == stasy(:pro) }.sort.should == [stasy(:pro)]
  end

  it "returns a unique list for a class including a module" do
    m = ReflectSpecs::D.new.protected_methods(*@object)
    m.select { |x| x == stasy(:pro) }.sort.should == [stasy(:pro)]
  end

  it "returns a unique list for a subclass of a class that includes a module" do
    m = ReflectSpecs::E.new.protected_methods(*@object)
    m.select { |x| x == stasy(:pro) }.sort.should == [stasy(:pro)]
  end
end

describe "Kernel#protected_methods" do
  describe "when not passed an argument" do
    # NOTE(review): the extra args here (nil, []) feed @object in the shared
    # block via mspec's it_behaves_like — confirm the intended arity, since
    # the shared examples splat @object into protected_methods(*@object).
    it_behaves_like :kernel_protected_methods_supers, nil, []
  end

  describe "when passed true" do
    it_behaves_like :kernel_protected_methods_supers, nil, true
  end
end
// // ProjectionAlgorithm.cpp // CMakeProject_4.0 // // Created by blasse on 8/26/13. // // #include "ExtendedProjectionAlgorithm.h" #include <iostream> extern "C" { #include "array.h" #include "image.h" #include "utilities.h" } #include "Filter.h" #include "HeightMapAnalysis.h" #include "Interpolation.h" #include "Refinement.h" #include "ShowArray.h" #include "ZPlaneSelection.h" #include "ZProjection.h" using namespace ProjectionResults; namespace ProjectionMethod { /*********************** * Constructor / Destructor **********************/ ExtendedProjectionAlgorithm::ExtendedProjectionAlgorithm (std::string imgPath) { image_ = std::make_shared<RasterizedImage>(Read_Image(&imgPath[0],0)); parametersAreSet_ = false; } ExtendedProjectionAlgorithm::ExtendedProjectionAlgorithm () { parametersAreSet_ = false; } ExtendedProjectionAlgorithm::~ExtendedProjectionAlgorithm () { Free_Array(image_->GetImage()); } void ExtendedProjectionAlgorithm::SetAllParameters (ProjectionMethod::ExtendedParaSet & parameter) { extendedImageParameter_ = parameter; (*image_).SetGrid(extendedImageParameter_.radius); ProjectionMethod::ParaSet imageParameter; imageParameter.radius = parameter.radius; imageParameter.layer = 0; imageParameter.verbose = false; SetParameters(imageParameter); parametersAreSet_ = true; } void ExtendedProjectionAlgorithm::ProjectSurfaceUsingHeightMap (Array * heightMap) { if (parametersAreSet_ == false ) { std::cout << "Error: Parameters are not correctly set" << std::endl; return; } if ((*image_).GetType() == UINT8_TYPE) { SurfaceProjection_8bit(heightMap); } else if ((*image_).GetType() == UINT16_TYPE) { SurfaceProjection_Fast16bit(heightMap); } else { std::cout << "This image type is not supported (only uint8 and unit16)!" 
<< std::endl; return; } } void ExtendedProjectionAlgorithm::SurfaceProjection_8bit (Array * heightMap) { Array * maxim = MaxProjection_8bit((*image_), imageParameter_); Array * index = MaxIndices_8bit((*image_), imageParameter_, maxim); imageParameter_.radius = imageParameter_.radius/2; (*image_).UpdateGrid(2); Array * refinedLevel = DecomposeImage(heightMap); Array * level = SelectPlanes_Variance_UsingHeightMap((*image_), imageParameter_, extendedImageParameter_, refinedLevel); Array * filteredLevel = LocalMedianFilter((*image_), imageParameter_, level, 1, (*image_).GetDepth()/10); Array * corners = InterpolateCorners((*image_), imageParameter_, filteredLevel); ResultArrays result = InterpolatePlanes_8bit((*image_), imageParameter_, maxim, corners); heightMaps_.push_back(filteredLevel); interpolatedHeightMaps_.push_back(result.interpolatedHeightMap); projections_.push_back(result.projection); imageParameter_.layer = 1; Free_Array(refinedLevel); Free_Array(level); Free_Array(corners); } void ExtendedProjectionAlgorithm::SurfaceProjection_Fast16bit (Array * heightMap) { Array * maxim = MaxProjection_8bit((*image_), imageParameter_); Array * index = MaxIndices_8bit((*image_), imageParameter_, maxim); imageParameter_.radius = imageParameter_.radius/2; (*image_).UpdateGrid(2); Array * refinedLevel = DecomposeImage(heightMap); Array * level = SelectPlanes_Variance_UsingHeightMap((*image_), imageParameter_, extendedImageParameter_, refinedLevel); Array * filteredLevel = LocalMedianFilter((*image_), imageParameter_, level, 1, (*image_).GetDepth()/10); Array * corners = InterpolateCorners((*image_), imageParameter_, filteredLevel); ResultArrays result = InterpolatePlanes_16bit((*image_), imageParameter_, maxim, corners); heightMaps_.push_back(filteredLevel); interpolatedHeightMaps_.push_back(result.interpolatedHeightMap); projections_.push_back(result.projection); imageParameter_.layer = 1; Free_Array(refinedLevel); Free_Array(level); Free_Array(corners); } // 
/*********************** // * Get functions // **********************/ // // RasterizedImage ExtendedProjectionAlgorithm::GetImage() const { // return image_; // } // // int ExtendedProjectionAlgorithm::GetGridSize() const { // return imageParameter_.radius; // } // // int ExtendedProjectionAlgorithm::GetDistance() const { // return imageParameter_.distance; // } // // int ExtendedProjectionAlgorithm::GetLayerNumber() const { // return imageParameter_.layer; // } // // int ExtendedProjectionAlgorithm::GetBrightPixelCount() const { // return imageParameter_.threshold; // } // // bool ExtendedProjectionAlgorithm::IsVerbose() const { // return imageParameter_.verbose; // } // // bool ProjectionAlgorithm::UseMaximumInterpolation() const { // return imageParameter_.maxInterpolation; // } // // bool ProjectionAlgorithm::PrintHeightMap() const { // return imageParameter_.printHeightMap; // } // // bool ProjectionAlgorithm::PrintHeightMapRealSize() const { // return imageParameter_.printRealHeightMap; // } // // ParaSet ProjectionAlgorithm::GetParameterSet() const { // return imageParameter_; // } // // /*********************** // * Set functions // **********************/ // // void ProjectionAlgorithm::SetGridSize(int gridSize) { // // if (gridSize % 2 != 0) { // gridSize++; // } // imageParameter_.radius = gridSize; // } // // void ProjectionAlgorithm::SetDistance(int distance) { // imageParameter_.distance = distance; // } // // void ProjectionAlgorithm::SetLayerNumber(int layerNumber) { // imageParameter_.layer = layerNumber; // } // // void ProjectionAlgorithm::SetBrightPixelCount(int brightPixelCount) { // imageParameter_.threshold = brightPixelCount; // } // // void ProjectionAlgorithm::SetVerbose(bool verbose) { // imageParameter_.verbose = verbose; // } // // void ProjectionAlgorithm::SetMaximumInterpolation(bool maximumInterpolation) { // imageParameter_.maxInterpolation = maximumInterpolation; // } // // void ProjectionAlgorithm::SetPrintHeightMap(int 
printHeightMap) { // imageParameter_.printHeightMap = printHeightMap; // } // // void ProjectionAlgorithm::SetPrintHeightMapRealSize(int printHeightMapRealSize) { // imageParameter_.printRealHeightMap = printHeightMapRealSize; // } // // Array * ProjectionAlgorithm::RescaleHeightMap (Array * heightMap) { // // Array * realHM = Make_Array_With_Shape(PLAIN_KIND, UINT32_TYPE, Coord2(image_.GetRealHeight(), image_.GetRealWidth())); // uint32 * realHMVals = AUINT32(realHM); // uint32 * hMVals = AUINT32(heightMap); // // for (int x=0; x <image_.GetRealWidth(); x++) { // for (int y=0; y<image_.GetRealHeight(); y++) { // // Indx_Type idx = Coord2IdxA(heightMap, Coord2(floor((double)y/imageParameter_ .radius), floor((double)x/imageParameter_.radius))); // Indx_Type idxReal = Coord2IdxA(realHM, Coord2(y,x)); // // realHMVals[idxReal] = hMVals[idx]; // } // } // return realHM; // } // // // /*********************** // * PUBLIC: Projection Function // **********************/ // // void ProjectionAlgorithm::ProjectImageStack() { // // if (imageParameter_.verbose) { // std::cout << "Read in " << image_.GetWidth() << "x" << image_.GetHeight() << "x" << image_.GetDepth() << " image" << std::endl; // std::cout << "Image type: " << image_.GetType() << " (0=uint8, 1=uint16)" << std::endl; // std::cout << "Use an " << imageParameter_.radius << "x" << imageParameter_.radius << " window (initial grid size)" << std::endl; // std::cout << "Distance for second selection step: " << imageParameter_.distance << std::endl; // std::cout << "Looking at the top " << imageParameter_.threshold << " pixels" << std::endl; // } // // if (parametersAreSet_ == false ) { // std::cout << "Parameters are not set" << std::endl; // return; // } // // if (image_.GetType() == UINT8_TYPE) { // Project_8bit(); // } else if (image_.GetType() == UINT16_TYPE) { // Fast_Project_16bit(); // } else { // std::cout << "This image type is not supported (only uint8 and unit16!" 
<< std::endl; // return; // } // } // // /*********************** // * PUBLIC: Draw interpolated height map // **********************/ // // void ProjectionAlgorithm::DrawInterpolatedHeightMap(std::string outputPath) { // // for (int i=0; i<imageParameter_.layer; i++) { // std::string outputImgPath = outputPath + "_r" + std::to_string(2*imageParameter_.radius) + "_d" + std::to_string(imageParameter_.distance) + "_t"+ std::to_string(imageParameter_.threshold) + "_layer" + std::to_string(i) +"_interpolatedHeightMap.tif"; // Write_Image((char *)outputImgPath.data(),interpolatedHeightMaps_[i],DONT_PRESS); // } // } // // void ProjectionAlgorithm::DrawHeightMap(std::string outputPath) { // // for (int i=0; i<imageParameter_.layer; i++) { // Array * realHM = RescaleHeightMap(heightMaps_[i]); // // std::string outputImgPath = outputPath + "_r" + std::to_string(2*imageParameter_.radius) + "_d" + std::to_string(imageParameter_.distance) + "_t"+ std::to_string(imageParameter_.threshold) + "_layer" + std::to_string(i) +"_heightMap.tif"; // Write_Image((char *)outputImgPath.data(),realHM,DONT_PRESS); // // Free_Array(realHM); // } // } // // void ProjectionAlgorithm::DrawDownsampledHeightMap(std::string outputPath) { // // for (int i=0; i<imageParameter_.layer; i++) { // std::string outputImgPath = outputPath + "_r" + std::to_string(2*imageParameter_.radius) + "_d" + std::to_string(imageParameter_.distance) + "_t"+ std::to_string(imageParameter_.threshold) + "_layer" + std::to_string(i) +"_downsampledHeightMap.tif"; // Write_Image((char *)outputImgPath.data(),heightMaps_[i],DONT_PRESS); // } // } // // void ProjectionAlgorithm::DrawProjection(std::string outputPath) { // // for (int i=0; i<imageParameter_.layer; i++) { // std::string outputImgPath = outputPath + "_r" + std::to_string(2*imageParameter_.radius) + "_d" + std::to_string(imageParameter_.distance) + "_t"+ std::to_string(imageParameter_.threshold) + "_layer" + std::to_string(i) +"_projection.tif"; // // 
Write_Image((char *)outputImgPath.data(),projections_[i],DONT_PRESS); // } // // } // // // // /*********************** // * PRIVATE: Projection Function for 8bit // **********************/ // // void ProjectionAlgorithm::Project_8bit (){ // // Array * maxim = MaxProjection_8bit(image_, imageParameter_); // Array * index = MaxIndices_8bit(image_, imageParameter_, maxim); // // // ---------- version 1.0 ------------- // // // 1. Select reference height map with a big window (radius) // // Selecting the z planes using the occurrences of the brightest pixel // // std::vector<Array *> levels = PlaneSelection_SeveralLevels_8bit(image_, imageParameter_, maxim, index); // Free_Array(index); // // imageParameter_.radius = imageParameter_.radius/2; // image_.UpdateGrid(2); // // for (int i=0; i<imageParameter_.layer; i++) { // // if (imageParameter_.verbose) { // std::cout << "---- Layer " << i << std::endl; // } // // // 2. Decomposing the compuational window based on the quad tree principle // // new window size: 0.5 * radius // // Smoothing of the resulting height map using local median filtering // // Array * refinedLevel = DecomposeImage(levels[i]); //// Array * brightPixels = BrightPixelDistribution(image_, imageParameter_, maxim, index); // // int filterSizes2 [] = {4,2,1}; // Array * median = Filtering(image_, imageParameter_, refinedLevel, filterSizes2, image_.GetDepth()/10); //// Array * median = Filtering_Confidence(image_, imageParameter_, refinedLevel, filterSizes2, image_.GetDepth()/10, brightPixels); // Free_Array(refinedLevel); // // 3. 
Select the final height map using a subset of the image stack // // defined by the reference height map and a distance parameter // // // // ------ method 1 ---- variance ------------- // //// Array * substack = Substack_UsingMask_8bit(image_, imageParameter_, median, false); // // Array * level = SelectPlanes_Variance_8bit(image_, imageParameter_, median); // //// Array * test = LocalMedianFilter_InclVar(img, imgPara, level, img.GetDepth(), 1); //// Write_Image("weighted.tif", test, DONT_PRESS); // // //median = Copy_Array(level); // int filterSizes3 [] = {2,1}; // //median = Filtering(img, imgPara, level, img.GetDepth(), filterSizes3); // median = LocalMedianFilter(image_, imageParameter_, level, 1, 1); // // // if (imageParameter_.range) { // GetLevelRange(median); // } // // // Array * corners = InterpolateCorners(image_, imageParameter_, median); // ResultArrays result = InterpolatePlanes_8bit(image_, imageParameter_, maxim, corners); // // heightMaps_.push_back(median); // interpolatedHeightMaps_.push_back(result.interpolatedHeightMap); // projections_.push_back(result.projection); // //// imageParameter_.radius = imageParameter_.radius*2; //// image_.UpdateGrid(0.5); // // // Free_Array(level); // Free_Array(corners); // } // //// imageParameter_.radius = imageParameter_.radius/2; //// image_.UpdateGrid(2); // // for (int i=0; i<imageParameter_.layer; i++) { // Free_Array(levels[i]); // } // // Free_Array(maxim); // } // // // /************************************************************* // * // * Projection Algorithm for a 16-bit image // * // *************************************************************/ // // // void ProjectionAlgorithm::Project_16bit (){ // // Array * maxim = MaxProjection_16bit(image_, imageParameter_); // Array * index = MaxIndices_16bit(image_, imageParameter_, maxim); // // Levels levels = PlaneSelection_2Levels_16bit(image_, imageParameter_, maxim, index); // // std::vector<Array *> levels = PlaneSelection_SeveralLevels_16bit(img, 
imgPara, maxim, index); // // Free_Array(index); // // for (int i=0; i<imageParameter_.layer; i++) { // // std::cout << "---- Layer " << i << std::endl; // // Array * refinedLevel = DecomposeImage(levels.levels[i]); // // imageParameter_.radius = imageParameter_.radius/2; // image_.UpdateGrid(2); // // int filterSizes [] = {4, 2}; // Array * median = Filtering(image_, imageParameter_, refinedLevel, filterSizes, 1); // Free_Array(refinedLevel); // // Array * substack = Substack_UsingMask_16bit(image_, imageParameter_, median, true); // Free_Array(median); // Array * level = SelectPlanes_Variance_16bit(image_, imageParameter_, substack); // Free_Array(substack); // // median = Copy_Array(level); // int filterSizes2 [] = {2,2,1,1,1}; // median = Filtering(image_, imageParameter_, level, filterSizes2, 1); // // // if (imageParameter_.range) { // GetLevelRange(median); // } // // // Array * corners = InterpolateCorners(image_, imageParameter_, median); // ResultArrays result; // // if (imageParameter_.maxInterpolation) { // result = InterpolatePlanes_16bit_MaxInterpolation(image_, imageParameter_, maxim, corners); // } else { // result = InterpolatePlanes_16bit(image_, imageParameter_, maxim, corners); // } // // heightMaps_.push_back(median); // interpolatedHeightMaps_.push_back(result.interpolatedHeightMap); // projections_.push_back(result.projection); // // imageParameter_.radius = imageParameter_.radius*2; // image_.UpdateGrid(0.5); // // Free_Array(level); // Free_Array(corners); // } // // imageParameter_.radius = imageParameter_.radius/2; // image_.UpdateGrid(2); // // for (int i=0; i<imageParameter_.layer; i++) { // Free_Array(levels.levels[i]); // } // Free_Array(levels.confidence); // Free_Array(maxim); // } // // /************************************************************* // * // * Fast Projection Algorithm for a 16-bit image // * // *************************************************************/ // void ProjectionAlgorithm::Fast_Project_16bit (){ // // 
image_.SetGrid(imageParameter_.radius); // // Array * img_16bit = Copy_Array(image_.GetImage()); // Array * maxim_16bit = MaxProjection_16bit(image_, imageParameter_); // // // // Works on 8-bit to figure out the height map // image_.ScaleTo8bit(); // // Array * maxim = MaxProjection_8bit(image_, imageParameter_); // Array * index = MaxIndices_8bit(image_, imageParameter_, maxim); // // Levels levels = PlaneSelection_2Levels_16bit(image_, imageParameter_, maxim, index); // #TODO: check 16 / 8 bit // //std::vector<Array *> levels = PlaneSelection_SeveralLevels_8bit(img, imgPara, maxim, index); // Free_Array(index); // // for (int i=0; i<imageParameter_.layer; i++) { // // std::cout << "---- Layer " << i << std::endl; // // Array * refinedLevel = DecomposeImage(levels.levels[i]); // // imageParameter_.radius = imageParameter_.radius/2; // image_.UpdateGrid(2); // // int filterSizes [] = {4, 2}; // Array * median = Filtering_Confidence(image_, imageParameter_, refinedLevel, filterSizes, 1, levels.confidence); // Free_Array(refinedLevel); // // Array * substack = Substack_UsingMask_8bit(image_, imageParameter_, median, true); // // Free_Array(median); // Array * level = SelectPlanes_Variance_8bit(image_, imageParameter_, substack); // Free_Array(substack); // // int filterSizes2 [] = {2,2,1,1,1}; // median = Filtering(image_, imageParameter_, level, filterSizes2, 1); // // if (imageParameter_.range) { // GetLevelRange(median); // } // // // Projections happens on 16-bit // Free_Array(image_.GetImage()); // image_.SetImage(img_16bit); // img_16bit = Copy_Array(image_.GetImage()); // // Array * corners = InterpolateCorners(image_, imageParameter_, median); // ResultArrays result; // // if (imageParameter_.maxInterpolation) { // if (imageParameter_.verbose) { // std::cout << "Interpolation using a maximum projection" << std::endl; // } // result = InterpolatePlanes_16bit_MaxInterpolation(image_, imageParameter_, maxim_16bit, corners); // } else { // result = 
InterpolatePlanes_16bit(image_, imageParameter_, maxim_16bit, corners); // } // // heightMaps_.push_back(median); // interpolatedHeightMaps_.push_back(result.interpolatedHeightMap); // projections_.push_back(result.projection); // // image_.ScaleTo8bit(); // imageParameter_.radius = imageParameter_.radius*2; // image_.UpdateGrid(0.5); // // // Free_Array(level); // Free_Array(corners); // } // // imageParameter_.radius = imageParameter_.radius/2; // image_.UpdateGrid(2); // // for (int i=0; i<imageParameter_.layer; i++) { // Free_Array(levels.levels[i]); // } // Free_Array(levels.confidence); // Free_Array(maxim); // Free_Array(maxim_16bit); // Free_Array(img_16bit); // } // // // void ProjectionAlgorithm::SetParameters (ParaSet & parameter) { // // imageParameter_ = parameter; // image_.SetGrid(imageParameter_.radius); // parametersAreSet_ = true; // } // // // }
<!DOCTYPE html> <html> <head> <title>The page you were looking for doesn't exist (404)</title> <style type="text/css"> body { background-color: #fff; color: #666; text-align: center; font-family: arial, sans-serif; } div.dialog { width: 25em; padding: 0 4em; margin: 4em auto 0 auto; border: 1px solid #ccc; border-right-color: #999; border-bottom-color: #999; } h1 { font-size: 100%; color: #f00; line-height: 1.5em; } </style> </head> <body> <!-- This file lives in public/404.html --> <div class="dialog"> <h1>The page you were looking for doesn't exist.</h1> <p>You may have mistyped the address or the page may have moved.</p> </div> </body> </html>
public class TestCompletion { public static <T, V> ParallelPipeline<T, V> test(T base, V newStage, T upstream) { return base != null ? new ParallelPipeline<>(base, newStage) : new ParallelPipeline<>(upstream, newStage); } private static class ParallelPipeline<T, V> { public ParallelPipeline(T p0, V p1) { } } }
#import "ZXErrors.h"
#import "ZXGenericMultipleBarcodeReader.h"
#import "ZXReader.h"
#import "ZXResultPoint.h"

// Sub-regions narrower/shorter than this (in pixels) are not recursed into.
int const MIN_DIMENSION_TO_RECUR = 100;
// Hard cap on recursive crop depth so decoding always terminates.
int const MAX_DEPTH = 4;

@interface ZXGenericMultipleBarcodeReader ()

// The single-barcode reader each decode attempt is delegated to.
@property (nonatomic, weak) id<ZXReader> delegate;

@end

@implementation ZXGenericMultipleBarcodeReader

- (id)initWithDelegate:(id<ZXReader>)delegate {
  if (self = [super init]) {
    _delegate = delegate;
  }
  return self;
}

// Convenience overload: decode with no hints.
- (NSArray *)decodeMultiple:(ZXBinaryBitmap *)image error:(NSError **)error {
  return [self decodeMultiple:image hints:nil error:error];
}

// Entry point: recursively decodes as many barcodes as possible from image.
// Returns nil with an error when the recursion fails or nothing was found.
- (NSArray *)decodeMultiple:(ZXBinaryBitmap *)image hints:(ZXDecodeHints *)hints error:(NSError **)error {
  NSMutableArray *results = [NSMutableArray array];
  if (![self doDecodeMultiple:image hints:hints results:results xOffset:0 yOffset:0 currentDepth:0 error:error]) {
    return nil;
  } else if (results.count == 0) {
    if (error) *error = NotFoundErrorInstance();
    return nil;
  }
  return results;
}

// Decodes one barcode via the delegate, records it translated back into the
// full image's coordinate space, then recurses into the first sufficiently
// large margin (left, top, right, bottom) around the found points.
// Returns NO only when the delegate reports a decode error.
- (BOOL)doDecodeMultiple:(ZXBinaryBitmap *)image hints:(ZXDecodeHints *)hints results:(NSMutableArray *)results xOffset:(int)xOffset yOffset:(int)yOffset currentDepth:(int)currentDepth error:(NSError **)error {
  if (currentDepth > MAX_DEPTH) {
    return YES;
  }
  ZXResult *result = [self.delegate decode:image hints:hints error:error];
  if (!result) {
    return NO;
  }
  // Dedupe by decoded text: overlapping crops can re-find the same barcode.
  BOOL alreadyFound = NO;
  for (ZXResult *existingResult in results) {
    if ([[existingResult text] isEqualToString:[result text]]) {
      alreadyFound = YES;
      break;
    }
  }
  if (!alreadyFound) {
    [results addObject:[self translateResultPoints:result xOffset:xOffset yOffset:yOffset]];
  }
  NSMutableArray *resultPoints = [result resultPoints];
  if (resultPoints == nil || [resultPoints count] == 0) {
    return YES;
  }
  // Compute the bounding box of the decoded barcode's result points.
  int width = [image width];
  int height = [image height];
  float minX = width;
  float minY = height;
  float maxX = 0.0f;
  float maxY = 0.0f;
  for (ZXResultPoint *point in resultPoints) {
    float x = [point x];
    float y = [point y];
    if (x < minX) {
      minX = x;
    }
    if (y < minY) {
      minY = y;
    }
    if (x > maxX) {
      maxX = x;
    }
    if (y > maxY) {
      maxY = y;
    }
  }
  // Recurse into at most one margin per call; offsets track the crop's
  // position so later points can be translated back to the full image.
  if (minX > MIN_DIMENSION_TO_RECUR) {
    return [self doDecodeMultiple:[image crop:0 top:0 width:(int)minX height:height] hints:hints results:results xOffset:xOffset yOffset:yOffset currentDepth:currentDepth + 1 error:error];
  }
  if (minY > MIN_DIMENSION_TO_RECUR) {
    return [self doDecodeMultiple:[image crop:0 top:0 width:width height:(int)minY] hints:hints results:results xOffset:xOffset yOffset:yOffset currentDepth:currentDepth + 1 error:error];
  }
  if (maxX < width - MIN_DIMENSION_TO_RECUR) {
    return [self doDecodeMultiple:[image crop:(int)maxX top:0 width:width - (int)maxX height:height] hints:hints results:results xOffset:xOffset + (int)maxX yOffset:yOffset currentDepth:currentDepth + 1 error:error];
  }
  if (maxY < height - MIN_DIMENSION_TO_RECUR) {
    return [self doDecodeMultiple:[image crop:0 top:(int)maxY width:width height:height - (int)maxY] hints:hints results:results xOffset:xOffset yOffset:yOffset + (int)maxY currentDepth:currentDepth + 1 error:error];
  }
  return YES;
}

// Returns a copy of result whose result points are shifted by
// (xOffset, yOffset) so they refer to the full image rather than the crop.
// Results without points are returned unchanged.
- (ZXResult *)translateResultPoints:(ZXResult *)result xOffset:(int)xOffset yOffset:(int)yOffset {
  NSArray *oldResultPoints = [result resultPoints];
  if (oldResultPoints == nil) {
    return result;
  }
  NSMutableArray *newResultPoints = [NSMutableArray arrayWithCapacity:[oldResultPoints count]];
  for (ZXResultPoint *oldPoint in oldResultPoints) {
    [newResultPoints addObject:[[ZXResultPoint alloc] initWithX:[oldPoint x] + xOffset y:[oldPoint y] + yOffset]];
  }
  ZXResult *newResult = [ZXResult resultWithText:result.text rawBytes:result.rawBytes length:result.length resultPoints:newResultPoints format:result.barcodeFormat];
  [newResult putAllMetadata:result.resultMetadata];
  return newResult;
}

@end
namespace content {
namespace {

// Maps a GpuMemoryBuffer format to the matching ANativeWindow pixel format.
// Only RGBA_8888 is supported on this path; the other formats are unreachable.
int WindowFormat(gfx::GpuMemoryBuffer::Format format) {
  switch (format) {
    case gfx::GpuMemoryBuffer::RGBA_8888:
      return WINDOW_FORMAT_RGBA_8888;
    case gfx::GpuMemoryBuffer::RGBX_8888:
    case gfx::GpuMemoryBuffer::BGRA_8888:
      NOTREACHED();
      return 0;
  }
  NOTREACHED();
  return 0;
}

}  // namespace

// Takes ownership of |native_window| (released in the destructor).
GpuMemoryBufferImplSurfaceTexture::GpuMemoryBufferImplSurfaceTexture(
    gfx::GpuMemoryBufferId id,
    const gfx::Size& size,
    Format format,
    const DestructionCallback& callback,
    ANativeWindow* native_window)
    : GpuMemoryBufferImpl(id, size, format, callback),
      native_window_(native_window),
      stride_(0) {
}

GpuMemoryBufferImplSurfaceTexture::~GpuMemoryBufferImplSurfaceTexture() {
  // Balances the acquire performed in CreateFromHandle().
  ANativeWindow_release(native_window_);
}

// static
// Acquires the native window for |handle|, configures its buffer geometry,
// and wraps it in a buffer impl. Returns an empty scoped_ptr on failure.
scoped_ptr<GpuMemoryBufferImpl>
GpuMemoryBufferImplSurfaceTexture::CreateFromHandle(
    const gfx::GpuMemoryBufferHandle& handle,
    const gfx::Size& size,
    Format format,
    const DestructionCallback& callback) {
  ANativeWindow* native_window = SurfaceTextureManager::GetInstance()->
      AcquireNativeWidgetForSurfaceTexture(handle.id);
  if (!native_window)
    return scoped_ptr<GpuMemoryBufferImpl>();
  ANativeWindow_setBuffersGeometry(
      native_window, size.width(), size.height(), WindowFormat(format));
  return make_scoped_ptr<GpuMemoryBufferImpl>(
      new GpuMemoryBufferImplSurfaceTexture(
          handle.id, size, format, callback, native_window));
}

// Locks the window buffer for CPU access and returns its pixel base pointer,
// or NULL if the lock fails. Updates stride_ as a side effect.
void* GpuMemoryBufferImplSurfaceTexture::Map() {
  TRACE_EVENT0("gpu", "GpuMemoryBufferImplSurfaceTexture::Map");
  DCHECK(!mapped_);
  DCHECK(native_window_);
  ANativeWindow_Buffer buffer;
  int status = ANativeWindow_lock(native_window_, &buffer, NULL);
  if (status) {
    VLOG(1) << "ANativeWindow_lock failed with error code: " << status;
    return NULL;
  }
  DCHECK_LE(size_.width(), buffer.stride);
  // ANativeWindow reports stride in pixels; convert to bytes per row.
  stride_ = buffer.stride * BytesPerPixel(format_);
  mapped_ = true;
  return buffer.bits;
}

// Unlocks the buffer locked by Map() and posts it to the window.
void GpuMemoryBufferImplSurfaceTexture::Unmap() {
  TRACE_EVENT0("gpu", "GpuMemoryBufferImplSurfaceTexture::Unmap");
  DCHECK(mapped_);
  ANativeWindow_unlockAndPost(native_window_);
  mapped_ = false;
}

// Row stride in bytes; only meaningful after a successful Map().
uint32 GpuMemoryBufferImplSurfaceTexture::GetStride() const {
  return stride_;
}

gfx::GpuMemoryBufferHandle GpuMemoryBufferImplSurfaceTexture::GetHandle() const {
  gfx::GpuMemoryBufferHandle handle;
  handle.type = gfx::SURFACE_TEXTURE_BUFFER;
  handle.id = id_;
  return handle;
}

}  // namespace content
import { ModalsComponent } from './modals.component'; import { DynamicModalComponent } from './dynamic-modal/dynamic-modal.component'; export const MODALS_MODULE = [ ModalsComponent, DynamicModalComponent ];
package unversioned import ( "errors" "net/http" "k8s.io/kubernetes/pkg/client/transport" "k8s.io/kubernetes/pkg/util" ) // KubeletClient is an interface for all kubelet functionality type KubeletClient interface { ConnectionInfoGetter } type ConnectionInfoGetter interface { GetConnectionInfo(host string) (scheme string, port uint, transport http.RoundTripper, err error) } // HTTPKubeletClient is the default implementation of KubeletHealthchecker, accesses the kubelet over HTTP. type HTTPKubeletClient struct { Client *http.Client Config *KubeletConfig } func MakeTransport(config *KubeletConfig) (http.RoundTripper, error) { tlsConfig, err := transport.TLSConfigFor(config.transportConfig()) if err != nil { return nil, err } rt := http.DefaultTransport if config.Dial != nil || tlsConfig != nil { rt = util.SetTransportDefaults(&http.Transport{ Dial: config.Dial, TLSClientConfig: tlsConfig, }) } return transport.HTTPWrappersForConfig(config.transportConfig(), rt) } // TODO: this structure is questionable, it should be using client.Config and overriding defaults. func NewKubeletClient(config *KubeletConfig) (KubeletClient, error) { transport, err := MakeTransport(config) if err != nil { return nil, err } c := &http.Client{ Transport: transport, Timeout: config.HTTPTimeout, } return &HTTPKubeletClient{ Client: c, Config: config, }, nil } func (c *HTTPKubeletClient) GetConnectionInfo(host string) (string, uint, http.RoundTripper, error) { scheme := "http" if c.Config.EnableHttps { scheme = "https" } return scheme, c.Config.Port, c.Client.Transport, nil } // FakeKubeletClient is a fake implementation of KubeletClient which returns an error // when called. It is useful to pass to the master in a test configuration with // no kubelets. type FakeKubeletClient struct{} func (c FakeKubeletClient) GetConnectionInfo(host string) (string, uint, http.RoundTripper, error) { return "", 0, nil, errors.New("Not Implemented") }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.17.1: http://docutils.sourceforge.net/" /> <meta name="viewport" content="width=device-width,initial-scale=1"> <meta http-equiv="x-ua-compatible" content="ie=edge"> <meta name="lang:clipboard.copy" content="Copy to clipboard"> <meta name="lang:clipboard.copied" content="Copied to clipboard"> <meta name="lang:search.language" content="en"> <meta name="lang:search.pipeline.stopwords" content="True"> <meta name="lang:search.pipeline.trimmer" content="True"> <meta name="lang:search.result.none" content="No matching documents"> <meta name="lang:search.result.one" content="1 matching document"> <meta name="lang:search.result.other" content="# matching documents"> <meta name="lang:search.tokenizer" content="[\s\-]+"> <link href="https://fonts.gstatic.com/" rel="preconnect" crossorigin> <link href="https://fonts.googleapis.com/css?family=Roboto+Mono:400,500,700|Roboto:300,400,400i,700&display=fallback" rel="stylesheet"> <style> body, input { font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif } code, kbd, pre { font-family: "Roboto Mono", "Courier New", Courier, monospace } </style> <link rel="stylesheet" href="../_static/stylesheets/application.css"/> <link rel="stylesheet" href="../_static/stylesheets/application-palette.css"/> <link rel="stylesheet" href="../_static/stylesheets/application-fixes.css"/> <link rel="stylesheet" href="../_static/fonts/material-icons.css"/> <meta name="theme-color" content="#3f51b5"> <script src="../_static/javascripts/modernizr.js"></script> <title>statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_approximate_diffuse &#8212; statsmodels</title> <link rel="icon" type="image/png" sizes="32x32" href="../_static/icons/favicon-32x32.png"> <link rel="icon" type="image/png" sizes="16x16" href="../_static/icons/favicon-16x16.png"> 
<link rel="manifest" href="../_static/icons/site.webmanifest"> <link rel="mask-icon" href="../_static/icons/safari-pinned-tab.svg" color="#919191"> <meta name="msapplication-TileColor" content="#2b5797"> <meta name="msapplication-config" content="../_static/icons/browserconfig.xml"> <link rel="stylesheet" href="../_static/stylesheets/examples.css"> <link rel="stylesheet" href="../_static/stylesheets/deprecation.css"> <link rel="stylesheet" type="text/css" href="../_static/pygments.css" /> <link rel="stylesheet" type="text/css" href="../_static/material.css" /> <link rel="stylesheet" type="text/css" href="../_static/graphviz.css" /> <link rel="stylesheet" type="text/css" href="../_static/plot_directive.css" /> <script data-url_root="../" id="documentation_options" src="../_static/documentation_options.js"></script> <script src="../_static/jquery.js"></script> <script src="../_static/underscore.js"></script> <script src="../_static/doctools.js"></script> <script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script> <link rel="shortcut icon" href="../_static/favicon.ico"/> <link rel="author" title="About these documents" href="../about.html" /> <link rel="index" title="Index" href="../genindex.html" /> <link rel="search" title="Search" href="../search.html" /> <link rel="next" title="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_known" href="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_known.html" /> <link rel="prev" title="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize" href="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize.html" /> </head> <body dir=ltr data-md-color-primary=indigo data-md-color-accent=blue> <svg class="md-svg"> <defs data-children-count="0"> <svg xmlns="http://www.w3.org/2000/svg" width="416" height="448" viewBox="0 0 416 448" 
id="__github"><path fill="currentColor" d="M160 304q0 10-3.125 20.5t-10.75 19T128 352t-18.125-8.5-10.75-19T96 304t3.125-20.5 10.75-19T128 256t18.125 8.5 10.75 19T160 304zm160 0q0 10-3.125 20.5t-10.75 19T288 352t-18.125-8.5-10.75-19T256 304t3.125-20.5 10.75-19T288 256t18.125 8.5 10.75 19T320 304zm40 0q0-30-17.25-51T296 232q-10.25 0-48.75 5.25Q229.5 240 208 240t-39.25-2.75Q130.75 232 120 232q-29.5 0-46.75 21T56 304q0 22 8 38.375t20.25 25.75 30.5 15 35 7.375 37.25 1.75h42q20.5 0 37.25-1.75t35-7.375 30.5-15 20.25-25.75T360 304zm56-44q0 51.75-15.25 82.75-9.5 19.25-26.375 33.25t-35.25 21.5-42.5 11.875-42.875 5.5T212 416q-19.5 0-35.5-.75t-36.875-3.125-38.125-7.5-34.25-12.875T37 371.5t-21.5-28.75Q0 312 0 260q0-59.25 34-99-6.75-20.5-6.75-42.5 0-29 12.75-54.5 27 0 47.5 9.875t47.25 30.875Q171.5 96 212 96q37 0 70 8 26.25-20.5 46.75-30.25T376 64q12.75 25.5 12.75 54.5 0 21.75-6.75 42 34 40 34 99.5z"/></svg> </defs> </svg> <input class="md-toggle" data-md-toggle="drawer" type="checkbox" id="__drawer"> <input class="md-toggle" data-md-toggle="search" type="checkbox" id="__search"> <label class="md-overlay" data-md-component="overlay" for="__drawer"></label> <a href="#generated/statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_approximate_diffuse" tabindex="1" class="md-skip"> Skip to content </a> <header class="md-header" data-md-component="header"> <nav class="md-header-nav md-grid"> <div class="md-flex navheader"> <div class="md-flex__cell md-flex__cell--shrink"> <a href="../index.html" title="statsmodels" class="md-header-nav__button md-logo"> <img src="../_static/statsmodels-logo-v2-bw.svg" height="26" alt="statsmodels logo"> </a> </div> <div class="md-flex__cell md-flex__cell--shrink"> <label class="md-icon md-icon--menu md-header-nav__button" for="__drawer"></label> </div> <div class="md-flex__cell md-flex__cell--stretch"> <div class="md-flex__ellipsis md-header-nav__title" data-md-component="title"> <span class="md-header-nav__topic">statsmodels 
v0.13.2</span> <span class="md-header-nav__topic"> statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_approximate_diffuse </span> </div> </div> <div class="md-flex__cell md-flex__cell--shrink"> <label class="md-icon md-icon--search md-header-nav__button" for="__search"></label> <div class="md-search" data-md-component="search" role="dialog"> <label class="md-search__overlay" for="__search"></label> <div class="md-search__inner" role="search"> <form class="md-search__form" action="../search.html" method="get" name="search"> <input type="text" class="md-search__input" name="q" placeholder="Search" autocapitalize="off" autocomplete="off" spellcheck="false" data-md-component="query" data-md-state="active"> <label class="md-icon md-search__icon" for="__search"></label> <button type="reset" class="md-icon md-search__icon" data-md-component="reset" tabindex="-1"> &#xE5CD; </button> </form> <div class="md-search__output"> <div class="md-search__scrollwrap" data-md-scrollfix> <div class="md-search-result" data-md-component="result"> <div class="md-search-result__meta"> Type to start searching </div> <ol class="md-search-result__list"></ol> </div> </div> </div> </div> </div> </div> <div class="md-flex__cell md-flex__cell--shrink"> <div class="md-header-nav__source"> <a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github"> <div class="md-source__icon"> <svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28"> <use xlink:href="#__github" width="24" height="24"></use> </svg> </div> <div class="md-source__repository"> statsmodels </div> </a> </div> </div> <script src="../_static/javascripts/version_dropdown.js"></script> <script> var json_loc = "../../versions-v2.json", target_loc = "../../", text = "Versions"; $( document ).ready( add_version_dropdown(json_loc, target_loc, text)); </script> </div> </nav> </header> <div 
class="md-container"> <nav class="md-tabs" data-md-component="tabs"> <div class="md-tabs__inner md-grid"> <ul class="md-tabs__list"> <li class="md-tabs__item"><a href="../user-guide.html" class="md-tabs__link">User Guide</a></li> <li class="md-tabs__item"><a href="../statespace.html" class="md-tabs__link">Time Series Analysis by State Space Methods <code class="xref py py-mod docutils literal notranslate"><span class="pre">statespace</span></code></a></li> <li class="md-tabs__item"><a href="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.html" class="md-tabs__link">statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ</a></li> </ul> </div> </nav> <main class="md-main"> <div class="md-main__inner md-grid" data-md-component="container"> <div class="md-sidebar md-sidebar--primary" data-md-component="navigation"> <div class="md-sidebar__scrollwrap"> <div class="md-sidebar__inner"> <nav class="md-nav md-nav--primary" data-md-level="0"> <label class="md-nav__title md-nav__title--site" for="__drawer"> <a href="../index.html" title="statsmodels" class="md-nav__button md-logo"> <img src="../_static/statsmodels-logo-v2-bw.svg" alt=" logo" width="48" height="48"> </a> <a href="../index.html" title="statsmodels">statsmodels v0.13.2</a> </label> <div class="md-nav__source"> <a href="https://github.com/statsmodels/statsmodels" title="Go to repository" class="md-source" data-md-source="github"> <div class="md-source__icon"> <svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 24 24" width="28" height="28"> <use xlink:href="#__github" width="24" height="24"></use> </svg> </div> <div class="md-source__repository"> statsmodels </div> </a> </div> <ul class="md-nav__list"> <li class="md-nav__item"> <a href="../install.html" class="md-nav__link">Installing statsmodels</a> </li> <li class="md-nav__item"> <a href="../gettingstarted.html" class="md-nav__link">Getting started</a> </li> <li class="md-nav__item"> <a 
href="../user-guide.html" class="md-nav__link">User Guide</a> <ul class="md-nav__list"> <li class="md-nav__item"> <a href="../user-guide.html#background" class="md-nav__link">Background</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#regression-and-linear-models" class="md-nav__link">Regression and Linear Models</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#time-series-analysis" class="md-nav__link">Time Series Analysis</a> <ul class="md-nav__list"> <li class="md-nav__item"> <a href="../tsa.html" class="md-nav__link">Time Series analysis <code class="xref py py-mod docutils literal notranslate"><span class="pre">tsa</span></code></a> </li> <li class="md-nav__item"> <a href="../statespace.html" class="md-nav__link">Time Series Analysis by State Space Methods <code class="xref py py-mod docutils literal notranslate"><span class="pre">statespace</span></code></a> </li> <li class="md-nav__item"> <a href="../vector_ar.html" class="md-nav__link">Vector Autoregressions <code class="xref py py-mod docutils literal notranslate"><span class="pre">tsa.vector_ar</span></code></a> </li></ul> </li> <li class="md-nav__item"> <a href="../user-guide.html#other-models" class="md-nav__link">Other Models</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#statistics-and-tools" class="md-nav__link">Statistics and Tools</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#data-sets" class="md-nav__link">Data Sets</a> </li> <li class="md-nav__item"> <a href="../user-guide.html#sandbox" class="md-nav__link">Sandbox</a> </li></ul> </li> <li class="md-nav__item"> <a href="../examples/index.html" class="md-nav__link">Examples</a> </li> <li class="md-nav__item"> <a href="../api.html" class="md-nav__link">API Reference</a> </li> <li class="md-nav__item"> <a href="../about.html" class="md-nav__link">About statsmodels</a> </li> <li class="md-nav__item"> <a href="../dev/index.html" class="md-nav__link">Developer Page</a> </li> <li 
class="md-nav__item"> <a href="../release/index.html" class="md-nav__link">Release Notes</a> </li> </ul> </nav> </div> </div> </div> <div class="md-sidebar md-sidebar--secondary" data-md-component="toc"> <div class="md-sidebar__scrollwrap"> <div class="md-sidebar__inner"> <nav class="md-nav md-nav--secondary"> <ul class="md-nav__list" data-md-scrollfix=""> <li class="md-nav__item"><a class="md-nav__extra_link" href="../_sources/generated/statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_approximate_diffuse.rst.txt">Show Source</a> </li> <li id="searchbox" class="md-nav__item"></li> </ul> </nav> </div> </div> </div> <div class="md-content"> <article class="md-content__inner md-typeset" role="main"> <section id="statsmodels-tsa-statespace-dynamic-factor-mq-dynamicfactormq-initialize-approximate-diffuse"> <h1 id="generated-statsmodels-tsa-statespace-dynamic-factor-mq-dynamicfactormq-initialize-approximate-diffuse--page-root">statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_approximate_diffuse<a class="headerlink" href="#generated-statsmodels-tsa-statespace-dynamic-factor-mq-dynamicfactormq-initialize-approximate-diffuse--page-root" title="Permalink to this headline">¶</a></h1> <dl class="py method"> <dt class="sig sig-object py" id="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_approximate_diffuse"> <span class="sig-prename descclassname"><span class="pre">DynamicFactorMQ.</span></span><span class="sig-name descname"><span class="pre">initialize_approximate_diffuse</span></span><span class="sig-paren">(</span><em class="sig-param"><span class="n"><span class="pre">variance</span></span><span class="o"><span class="pre">=</span></span><span class="default_value"><span class="pre">None</span></span></em><span class="sig-paren">)</span><a class="headerlink" href="#statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_approximate_diffuse" title="Permalink to this 
definition">¶</a></dt> <dd><p>Initialize approximate diffuse</p> </dd></dl> </section> </article> </div> </div> </main> </div> <footer class="md-footer"> <div class="md-footer-nav"> <nav class="md-footer-nav__inner md-grid"> <a href="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize.html" title="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize" class="md-flex md-footer-nav__link md-footer-nav__link--prev" rel="prev"> <div class="md-flex__cell md-flex__cell--shrink"> <i class="md-icon md-icon--arrow-back md-footer-nav__button"></i> </div> <div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title"> <span class="md-flex__ellipsis"> <span class="md-footer-nav__direction"> Previous </span> statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize </span> </div> </a> <a href="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_known.html" title="statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_known" class="md-flex md-footer-nav__link md-footer-nav__link--next" rel="next"> <div class="md-flex__cell md-flex__cell--stretch md-footer-nav__title"><span class="md-flex__ellipsis"> <span class="md-footer-nav__direction"> Next </span> statsmodels.tsa.statespace.dynamic_factor_mq.DynamicFactorMQ.initialize_known </span> </div> <div class="md-flex__cell md-flex__cell--shrink"><i class="md-icon md-icon--arrow-forward md-footer-nav__button"></i> </div> </a> </nav> </div> <div class="md-footer-meta md-typeset"> <div class="md-footer-meta__inner md-grid"> <div class="md-footer-copyright"> <div class="md-footer-copyright__highlight"> &#169; Copyright 2009-2019, Josef Perktold, Skipper Seabold, Jonathan Taylor, statsmodels-developers. </div> Last updated on Feb 08, 2022. <br/> Created using <a href="http://www.sphinx-doc.org/">Sphinx</a> 4.4.0. 
and <a href="https://github.com/bashtage/sphinx-material/">Material for Sphinx</a> </div> </div> </div> </footer> <script src="../_static/javascripts/application.js"></script> <script>app.initialize({version: "1.0.4", url: {base: ".."}})</script> </body> </html>
-- Create the application database and a dedicated MySQL account for it.
CREATE DATABASE IF NOT EXISTS datapersistentie;
USE datapersistentie;

-- Create the account explicitly for the host the grants below target.
-- NOTE: the original script relied on `GRANT ... IDENTIFIED BY ...`, which was
-- deprecated in MySQL 5.7 and removed in MySQL 8.0; credentials now belong on
-- CREATE USER only. It also created `sqluser` without a host (implicitly
-- 'sqluser'@'%') while granting to sqluser@localhost — the account actually
-- used for local logins is 'sqluser'@'localhost', created here.
CREATE USER IF NOT EXISTS 'sqluser'@'localhost' IDENTIFIED BY 'sqluserpw';

-- USAGE on *.* is the "no privileges" grant; kept from the original script,
-- it only ensures the account exists and can connect.
GRANT USAGE ON *.* TO 'sqluser'@'localhost';

-- Full access, restricted to the application schema only.
GRANT ALL PRIVILEGES ON datapersistentie.* TO 'sqluser'@'localhost';
/**
 * @module ol/interaction/KeyboardPan
 */
import EventType from '../events/EventType.js';
import Interaction, {pan} from './Interaction.js';
import KeyCode from '../events/KeyCode.js';
import {noModifierKeys, targetNotEditable} from '../events/condition.js';
import {rotate as rotateCoordinate} from '../coordinate.js';

/**
 * @typedef {Object} Options
 * @property {import("../events/condition.js").Condition} [condition] A function that
 * takes an {@link module:ol/MapBrowserEvent~MapBrowserEvent} and returns a
 * boolean to indicate whether that event should be handled. Default is
 * {@link module:ol/events/condition.noModifierKeys} and
 * {@link module:ol/events/condition.targetNotEditable}.
 * @property {number} [duration=100] Animation duration in milliseconds.
 * @property {number} [pixelDelta=128] The amount of pixels to pan on each key
 * press.
 */

/**
 * @classdesc
 * Allows the user to pan the map using keyboard arrows.
 * Note that, although this interaction is by default included in maps,
 * the keys can only be used when browser focus is on the element to which
 * the keyboard events are attached. By default, this is the map div,
 * though you can change this with the `keyboardEventTarget` in
 * {@link module:ol/Map~Map}. `document` never loses focus but, for any other
 * element, focus will have to be on, and returned to, this element if the keys
 * are to function.
 * See also {@link module:ol/interaction/KeyboardZoom~KeyboardZoom}.
 * @api
 */
class KeyboardPan extends Interaction {
  /**
   * @param {Options} [opt_options] Options.
   */
  constructor(opt_options) {
    super();

    const config = opt_options ? opt_options : {};

    /**
     * Built-in gate: only handle key events with no modifiers pressed and
     * whose target is not an editable element.
     * @private
     * @param {import("../MapBrowserEvent.js").default} mapBrowserEvent Browser event.
     * @return {boolean} Combined condition result.
     */
    this.defaultCondition_ = function (mapBrowserEvent) {
      return (
        noModifierKeys(mapBrowserEvent) && targetNotEditable(mapBrowserEvent)
      );
    };

    /**
     * Event filter deciding whether a key event is acted on.
     * @private
     * @type {import("../events/condition.js").Condition}
     */
    this.condition_ =
      config.condition !== undefined ? config.condition : this.defaultCondition_;

    /**
     * Pan animation duration, in milliseconds.
     * @private
     * @type {number}
     */
    this.duration_ = config.duration !== undefined ? config.duration : 100;

    /**
     * Pan distance per key press, in pixels.
     * @private
     * @type {number}
     */
    this.pixelDelta_ =
      config.pixelDelta !== undefined ? config.pixelDelta : 128;
  }

  /**
   * Handles the {@link module:ol/MapBrowserEvent map browser event}: on an
   * arrow-key `keydown` that passes the configured condition, pans the view
   * in the corresponding direction (rotated to match the view's rotation).
   * @param {import("../MapBrowserEvent.js").default} mapBrowserEvent Map browser event.
   * @return {boolean} `false` to stop event propagation.
   * @this {KeyboardPan}
   */
  handleEvent(mapBrowserEvent) {
    // Anything other than a keydown passes straight through.
    if (mapBrowserEvent.type != EventType.KEYDOWN) {
      return true;
    }

    const nativeEvent = /** @type {KeyboardEvent} */ (
      mapBrowserEvent.originalEvent
    );
    const code = nativeEvent.keyCode;
    const arrowKeys = [KeyCode.DOWN, KeyCode.LEFT, KeyCode.RIGHT, KeyCode.UP];

    // Ignore non-arrow keys and events rejected by the condition.
    if (!this.condition_(mapBrowserEvent) || !arrowKeys.includes(code)) {
      return true;
    }

    const view = mapBrowserEvent.map.getView();
    // Convert the configured pixel delta into map units at current resolution.
    const step = view.getResolution() * this.pixelDelta_;
    let dx = 0;
    let dy = 0;
    switch (code) {
      case KeyCode.DOWN:
        dy = -step;
        break;
      case KeyCode.LEFT:
        dx = -step;
        break;
      case KeyCode.RIGHT:
        dx = step;
        break;
      default: // KeyCode.UP
        dy = step;
    }

    // Rotate the pan vector so "up" follows the view rotation, then animate.
    const delta = [dx, dy];
    rotateCoordinate(delta, view.getRotation());
    pan(view, delta, this.duration_);
    nativeEvent.preventDefault();
    return false;
  }
}

export default KeyboardPan;
package org.apache.tinkerpop.gremlin.structure.io.graphson; import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; import org.apache.tinkerpop.gremlin.process.traversal.Bytecode; import org.apache.tinkerpop.gremlin.process.traversal.P; import org.apache.tinkerpop.gremlin.process.traversal.TextP; import org.apache.tinkerpop.gremlin.process.traversal.Traverser; import org.apache.tinkerpop.shaded.jackson.databind.JsonMappingException; import org.apache.tinkerpop.shaded.jackson.databind.ObjectMapper; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.time.Instant; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.UUID; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; import static org.hamcrest.core.StringContains.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.fail; /** * Tests automatic typed serialization/deserialization for GraphSON 2.0+. 
 *
 * @author Kevin Gallardo (https://kgdo.me)
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
@RunWith(Parameterized.class)
public class GraphSONMapperPartialEmbeddedTypeTest extends AbstractGraphSONTest {

    // Runs every test once per GraphSON version (2.0 and 3.0), each mapper
    // configured with PARTIAL_TYPES so values carry @type/@value metadata.
    @Parameterized.Parameters(name = "{0}")
    public static Iterable<Object[]> data() {
        return Arrays.asList(new Object[][]{
                {"v2", GraphSONMapper.build().version(GraphSONVersion.V2_0)
                        .addCustomModule(GraphSONXModuleV2d0.build().create(false))
                        .typeInfo(TypeInfo.PARTIAL_TYPES).create().createMapper()},
                {"v3", GraphSONMapper.build().version(GraphSONVersion.V3_0)
                        .addCustomModule(GraphSONXModuleV3d0.build().create(false))
                        .typeInfo(TypeInfo.PARTIAL_TYPES).create().createMapper()}
        });
    }

    // Mapper under test, injected per parameter row by the Parameterized runner.
    @Parameterized.Parameter(1)
    public ObjectMapper mapper;

    // Version label ("v2"/"v3"), used only in the generated test name.
    @Parameterized.Parameter(0)
    public String version;

    // Verifies @type/@value key ordering does not affect (de)serialization,
    // and that a mistyped value fails regardless of key order (TINKERPOP-1738).
    @Test
    public void elementOrderShouldNotMatter() throws Exception {
        final String bytecodeJSONFail1 = "{\"@type\":\"g:Bytecode\",\"@value\":{\"step\":[[\"addV\",\"poc_int\"],[\"property\",\"bigint1value\",{\"@type\":\"g:Int32\",\"@value\":-4294967295}]]}}";
        final String bytecodeJSONFail2 = "{\"@value\":{\"step\":[[\"addV\",\"poc_int\"],[\"property\",\"bigint1value\",{\"@value\":-4294967295,\"@type\":\"g:Int32\"}]]},\"@type\":\"g:Bytecode\"}";

        // first validate the failures of TINKERPOP-1738 - prior to the jackson fix on 2.9.4 one of these would have
        // passed based on the ordering of the properties
        try {
            mapper.readValue(bytecodeJSONFail1, Bytecode.class);
            fail("Should have thrown an error because 'bigint1value' is not an int32");
        } catch (Exception ex) {
            assertThat(ex, instanceOf(JsonMappingException.class));
        }

        try {
            mapper.readValue(bytecodeJSONFail2, Bytecode.class);
            fail("Should have thrown an error because 'bigint1value' is not an int32");
        } catch (Exception ex) {
            assertThat(ex, instanceOf(JsonMappingException.class));
        }

        // now do a legit parsing based on order - same payload, keys reversed
        final String bytecodeJSON1 = "{\"@type\":\"g:Bytecode\",\"@value\":{\"step\":[[\"addV\",\"poc_int\"],[\"property\",\"bigint1value\",{\"@type\":\"g:Int64\",\"@value\":-4294967295}]]}}";
        final String bytecodeJSON2 = "{\"@value\":{\"step\":[[\"addV\",\"poc_int\"],[\"property\",\"bigint1value\",{\"@value\":-4294967295,\"@type\":\"g:Int64\"}]]},\"@type\":\"g:Bytecode\"}";

        final Bytecode bytecode1 = mapper.readValue(bytecodeJSON1, Bytecode.class);
        final Bytecode bytecode2 = mapper.readValue(bytecodeJSON2, Bytecode.class);
        assertEquals(bytecode1, bytecode2);
    }

    // Round-trips a deeply nested list/map structure mixing typed (UUID, Long)
    // and untyped values, then probes maps that only partially look like the
    // {@type, @value} pattern to ensure the TypeDeserializer is not fooled.
    @Test
    public void shouldSerializeDeserializeNestedCollectionsAndMapAndTypedValuesCorrectly() throws Exception {
        // Trying to fail the TypeDeserializer type detection
        final UUID uuid = UUID.randomUUID();
        final List<Object> myList = new ArrayList<>();

        final List<Object> myList2 = new ArrayList<>();
        myList2.add(UUID.randomUUID());
        myList2.add(33L);
        myList2.add(84);
        final Map<String,Object> map2 = new HashMap<>();
        map2.put("eheh", UUID.randomUUID());
        map2.put("normal", "normal");
        myList2.add(map2);

        final Map<String, Object> map1 = new HashMap<>();
        map1.put("hello", "world");
        map1.put("test", uuid);
        map1.put("hehe", myList2);
        myList.add(map1);

        myList.add("kjkj");
        myList.add(UUID.randomUUID());

        assertEquals(myList, serializeDeserializeAuto(mapper, myList));

        // no "@value" property - must deserialize as a plain map
        String s = "{\""+GraphSONTokens.VALUETYPE+"\":\"" + GraphSONTokens.GREMLIN_TYPE_NAMESPACE + ":UUID\", \"test\":2}";
        Map<String,Object> map = new LinkedHashMap<>();
        map.put(GraphSONTokens.VALUETYPE, GraphSONTokens.GREMLIN_TYPE_NAMESPACE + ":UUID");
        map.put("test", 2);
        Object res = mapper.readValue(s, Object.class);
        assertEquals(map, res);

        // "@value" and "@type" property reversed - still a typed Long
        s = "{\""+GraphSONTokens.VALUEPROP+"\":2, \"" + GraphSONTokens.VALUETYPE + "\":\"" + GraphSONTokens.GREMLIN_TYPE_NAMESPACE + ":Int64\"}";
        res = mapper.readValue(s, Object.class);
        assertEquals(res, 2L);
        assertEquals(res.getClass(), Long.class);

        // no "@type" property - again a plain map
        s = "{\""+GraphSONTokens.VALUEPROP + "\":2, \"id\":2}";
        map = new LinkedHashMap<>();
        map.put(GraphSONTokens.VALUEPROP, 2);
        map.put("id", 2);
        res = mapper.readValue(s, Object.class);
        assertEquals(res, map);
    }

    // A map matching the {@type, @value} pattern but carrying extra fields is
    // ambiguous; the deserializer must reject it - in either key order.
    @Test
    public void shouldFailIfMoreThanTwoPropertiesInATypePattern() {
        String s = "{\"" + GraphSONTokens.VALUEPROP + "\":2, \"" + GraphSONTokens.VALUETYPE + "\":\""+GraphSONTokens.GREMLIN_TYPE_NAMESPACE +":Int64\", \"hello\": \"world\"}";

        try {
            mapper.readValue(s, Object.class);
            fail("Should have failed deserializing because there's more than properties in the type.");
        } catch (IOException e) {
            assertThat(e.getMessage(), containsString("Detected the type pattern in the JSON payload but the map containing the types and values contains other fields. This is not allowed by the deserializer."));
        }

        s = "{\"" + GraphSONTokens.VALUETYPE + "\":\""+GraphSONTokens.GREMLIN_TYPE_NAMESPACE +":Int64\",\"" + GraphSONTokens.VALUEPROP + "\":2, \"hello\": \"world\"}";

        try {
            mapper.readValue(s, Object.class);
            fail("Should have failed deserializing because there's more than properties in the type.");
        } catch (IOException e) {
            assertThat(e.getMessage(), containsString("Detected the type pattern in the JSON payload but the map containing the types and values contains other fields. This is not allowed by the deserializer."));
        }
    }

    // Asking the mapper for a target type that contradicts the embedded
    // @type marker must fail with an explanatory message.
    @Test
    public void shouldFailIfTypeSpecifiedIsNotSameTypeInPayload() {
        final ZoneOffset o = ZonedDateTime.now().getOffset();
        final ByteArrayOutputStream stream = new ByteArrayOutputStream();
        try {
            mapper.writeValue(stream, o);
            final InputStream inputStream = new ByteArrayInputStream(stream.toByteArray());
            // What has been serialized is a ZoneOffset with the type, but the user explicitly requires another type.
            mapper.readValue(inputStream, Instant.class);
            fail("Should have failed decoding the value");
        } catch (Exception e) {
            assertThat(e.getMessage(), containsString("Could not deserialize the JSON value as required. Nested exception: java.lang.InstantiationException: Cannot deserialize the value with the detected type contained in the JSON ('" + GraphSONTokens.GREMLINX_TYPE_NAMESPACE + ":ZoneOffset') to the type specified in parameter to the object mapper (class java.time.Instant). Those types are incompatible."));
        }
    }

    // Unregistered POJOs should still round-trip via default bean handling.
    @Test
    public void shouldHandleRawPOJOs() throws Exception {
        final FunObject funObject = new FunObject();
        funObject.setVal("test");
        assertEquals(funObject.toString(), serializeDeserialize(mapper, funObject, FunObject.class).toString());
        assertEquals(funObject.getClass(), serializeDeserialize(mapper, funObject, FunObject.class).getClass());
    }

    // V2.0 + PARTIAL_TYPES: a Long survives the map round-trip as a Long.
    @Test
    public void shouldHandleMapWithTypesUsingEmbedTypeSettingV2d0() throws Exception {
        final ObjectMapper mapper = GraphSONMapper.build()
                .version(GraphSONVersion.V2_0)
                .typeInfo(TypeInfo.PARTIAL_TYPES)
                .create()
                .createMapper();

        final Map<String,Object> m = new HashMap<>();
        m.put("test", 100L);

        final String json = mapper.writeValueAsString(m);
        final Map read = mapper.readValue(json, HashMap.class);

        assertEquals(100L, read.get("test"));
    }

    // V2.0 + NO_TYPES: the Long degrades to an Integer on read-back.
    @Test
    public void shouldNotHandleMapWithTypesUsingEmbedTypeSettingV2d0() throws Exception {
        final ObjectMapper mapper = GraphSONMapper.build()
                .version(GraphSONVersion.V2_0)
                .typeInfo(TypeInfo.NO_TYPES)
                .create()
                .createMapper();

        final Map<String,Object> m = new HashMap<>();
        m.put("test", 100L);

        final String json = mapper.writeValueAsString(m);
        final Map read = mapper.readValue(json, HashMap.class);

        assertEquals(100, read.get("test"));
    }

    // V1.0 + PARTIAL_TYPES: same Long-preserving behavior as V2.0.
    @Test
    public void shouldHandleMapWithTypesUsingEmbedTypeSettingV1d0() throws Exception {
        final ObjectMapper mapper = GraphSONMapper.build()
                .version(GraphSONVersion.V1_0)
                .typeInfo(TypeInfo.PARTIAL_TYPES)
                .create()
                .createMapper();

        final Map<String,Object> m = new HashMap<>();
        m.put("test", 100L);

        final String json = mapper.writeValueAsString(m);
        final Map read = mapper.readValue(json, HashMap.class);

        assertEquals(100L, read.get("test"));
    }

    // V1.0 + NO_TYPES: Long degrades to Integer, mirroring the V2.0 case.
    @Test
    public void shouldNotHandleMapWithTypesUsingEmbedTypeSettingV1d0() throws Exception {
        final ObjectMapper mapper = GraphSONMapper.build()
                .version(GraphSONVersion.V1_0)
                .typeInfo(TypeInfo.NO_TYPES)
                .create()
                .createMapper();

        final Map<String,Object> m = new HashMap<>();
        m.put("test", 100L);

        final String json = mapper.writeValueAsString(m);
        final Map read = mapper.readValue(json, HashMap.class);

        assertEquals(100, read.get("test"));
    }

    // Without embedded types a nested structure cannot round-trip to an
    // equal object (UUIDs become strings, Longs become Integers, etc.).
    @Test
    public void shouldLooseTypesInfoWithGraphSONNoType() throws Exception {
        final ObjectMapper mapper = GraphSONMapper.build()
                .version(GraphSONVersion.V2_0)
                .typeInfo(TypeInfo.NO_TYPES)
                .create()
                .createMapper();

        final UUID uuid = UUID.randomUUID();
        final List<Object> myList = new ArrayList<>();

        final List<Object> myList2 = new ArrayList<>();
        myList2.add(UUID.randomUUID());
        myList2.add(33L);
        myList2.add(84);
        final Map<String,Object> map2 = new HashMap<>();
        map2.put("eheh", UUID.randomUUID());
        map2.put("normal", "normal");
        myList2.add(map2);

        final Map<String, Object> map1 = new HashMap<>();
        map1.put("hello", "world");
        map1.put("test", uuid);
        map1.put("hehe", myList2);
        myList.add(map1);

        myList.add("kjkj");
        myList.add(UUID.randomUUID());

        final String json = mapper.writeValueAsString(myList);
        final Object read = mapper.readValue(json, Object.class);

        // Not equals because of type loss
        assertNotEquals(myList, read);
    }

    // Traverser instances must round-trip through the Traverser interface.
    @Test
    public void shouldHandleDefaultRemoteTraverser() throws Exception {
        final DefaultRemoteTraverser<String> o = new DefaultRemoteTraverser<>("test", 100);
        assertEquals(o, serializeDeserialize(mapper, o, Traverser.class));
    }

    // Every flavor of P/TextP predicate, including and/or compositions,
    // must survive a serialize/deserialize round-trip.
    @Test
    public void shouldHandleVariantsOfP() throws Exception {
        final List<P> variantsOfP = Arrays.asList(
                P.between(1,2),
                P.eq(1),
                P.gt(1),
                P.gte(1),
                P.inside(1,2),
                P.lt(1),
                P.lte(1),
                P.neq(1),
                P.not(P.eq(1)),
                P.outside(1,2),
                P.within(1),
                P.within(1,2,3,4),
                P.within(Arrays.asList(1,2,3,4)),
                P.without(1),
                P.without(1,2,3,4),
                P.without(Arrays.asList(1,2,3,4)),
                P.eq(1).and(P.eq(2)),
                P.eq(1).or(P.eq(2)),
                TextP.containing("ark"),
                TextP.startingWith("mar"),
                TextP.endingWith("ko"),
                TextP.endingWith("ko").and(P.gte("mar")),
                P.gte("mar").and(TextP.endingWith("ko")));

        for (P p : variantsOfP) {
            if (p instanceof TextP) {
                assertEquals(p, serializeDeserialize(mapper, p, TextP.class));
            } else {
                assertEquals(p, serializeDeserialize(mapper, p, P.class));
            }
        }
    }

    // Class needs to be defined as statics as it's a nested class.
    // Minimal mutable bean used by the raw-POJO round-trip test above.
    public static class FunObject {
        private String val;

        public FunObject() {
        }

        public String getVal() {
            return this.val;
        }

        public void setVal(String s) {
            this.val = s;
        }

        public String toString() {
            return this.val;
        }
    }
}
import React, { PropTypes } from 'react';

// Stateless component that renders its `label` prop inside a <div>.
const testComponent = props => (
  <div>
    {props.label}
  </div>
);

testComponent.propTypes = {
  // FIX: `label` was declared `isRequired` while also being given a default
  // below — contradictory (a prop with a default can never be "missing", and
  // eslint's react/default-props-match-prop-types flags exactly this).
  // Making it optional lets the default apply and is backward-compatible:
  // all existing callers that pass `label` behave unchanged.
  label: PropTypes.string,
};

testComponent.defaultProps = {
  // Shown when no `label` prop is supplied.
  label: 'Hello World',
};

export default testComponent;
/*
 * Created on Jul 24, 2004
 */
package net.negativetwenty.bookmarker;

import net.negativetwenty.bookmarker.models.*;

import org.apache.tapestry.*;
import org.apache.tapestry.event.*;

import org.objectstyle.cayenne.*;

/**
 * Handles interaction with Bookmark views.
 *
 * Each listener below resolves the target Bookmark from the Cayenne ObjectId
 * passed as the first service parameter of the triggering link.
 *
 * @author nirvdrum
 */
public class ViewBookmarks extends ApplicationPage
{
    /**
     * LinkClicked listener. Reacts to a Bookmark's URL being clicked:
     * records the click, then redirects the browser to the bookmarked URL.
     *
     * @param cycle the current Tapestry request cycle; its first service
     *              parameter carries the clicked Bookmark's ObjectId
     */
    // TODO This should delegate to the "ViewBookmark" page to remove duplicated work.
    public void linkClicked(final IRequestCycle cycle)
    {
        // Increment the Bookmark's click count.
        final ObjectId id = (ObjectId) cycle.getServiceParameters()[0];
        final Bookmark b = (Bookmark) DataObjectUtils.objectForPK(getDataContext(), id);
        b.addClick();

        // Redirect to the Bookmark's URL.
        // NOTE: RedirectException is Tapestry's mechanism for issuing an
        // HTTP redirect; throwing it here is normal control flow, not an error.
        throw new RedirectException(b.getUrl());
    }

    /**
     * RemoveBookmark listener. Removes a bookmark from the database and
     * regenerates the RDF export so it no longer lists the removed entry.
     *
     * @param cycle the current Tapestry request cycle; its first service
     *              parameter carries the ObjectId of the Bookmark to remove
     */
    public void removeBookmark(final IRequestCycle cycle)
    {
        // Fetch the bookmark from the database, given its ObjectId.
        final Visit v = (Visit) getVisit();
        final ObjectId id = (ObjectId) cycle.getServiceParameters()[0];
        final Bookmark b = (Bookmark) DataObjectUtils.objectForPK(getDataContext(), id);

        // Remove the Bookmark from the database.
        v.removeBookmark(b);

        // TODO This seems pretty bad.  The RDF file creation should probably move into a new service or something.
        // Update the RDF file.
        final AddBookmark ab = (AddBookmark) cycle.getPage("AddBookmark");
        ab.createRdf(cycle);
    }

    /**
     * ModifyBookmark listener. Sets things up for a Bookmark to be modified
     * by pre-populating the AddBookmark page and activating it.
     *
     * @param cycle the current Tapestry request cycle; its first service
     *              parameter carries the ObjectId of the Bookmark to edit
     */
    public void modifyBookmark(final IRequestCycle cycle)
    {
        // Fetch the bookmark from the database, given its ObjectId.
        final ObjectId id = (ObjectId) cycle.getServiceParameters()[0];
        final Bookmark b = (Bookmark) DataObjectUtils.objectForPK(getDataContext(), id);

        // Set up an instance of the AddBookmark page for bookmark modification, and redirect to that page.
        final AddBookmark ab = (AddBookmark) cycle.getPage("AddBookmark");
        ab.setBookmark(b);

        cycle.activate(ab);
    }

    /**
     * Set the heading of the page to be the currently selected category.
     * Runs before each render via Tapestry's PageBeginRender lifecycle event;
     * leaves the heading untouched when no category is selected.
     *
     * @param event the page lifecycle event (unused beyond its timing)
     */
    public void pageBeginRender(final PageEvent event)
    {
        final Visit v = (Visit) getVisit();
        final Category c = v.getCategory();

        if (null != c)
        {
            getComponent("border").setProperty("heading", c.getName());
        }
    }
}
/*
 * Command-line tool that reads binary input files and emits each one as a
 * .c source file containing a byte array (or, alternatively, an assembly
 * or object file) so the data can be linked directly into a binary.
 * The actual writers (writeCCode/writeAssemblyCode/writeObjectCode) live
 * in pkg_genc.h; this file only parses options and dispatches.
 */
#include "unicode/utypes.h"

#if U_PLATFORM_HAS_WIN32_API
/* Trim <windows.h> to the minimum this tool needs. */
#   define VC_EXTRALEAN
#   define WIN32_LEAN_AND_MEAN
#   define NOUSER
#   define NOSERVICE
#   define NOIME
#   define NOMCX
#include <windows.h>
#include <time.h>
#endif

#if U_PLATFORM_IS_LINUX_BASED && U_HAVE_ELF_H
#   define U_ELF
#endif

#ifdef U_ELF
#   include <elf.h>
#   if defined(ELFCLASS64)
#       define U_ELF64
#   endif
/* Old elf.h headers may not have EM_X86_64, or have EM_X8664 instead. */
#   ifndef EM_X86_64
#       define EM_X86_64 62
#   endif
#   define ICU_ENTRY_OFFSET 0
#endif

#include <stdio.h>
#include <stdlib.h>
#include "unicode/putil.h"
#include "cmemory.h"
#include "cstring.h"
#include "filestrm.h"
#include "toolutil.h"
#include "unicode/uclean.h"
#include "uoptions.h"
#include "pkg_genc.h"

/* Indices into the options[] table below; the two lists must stay in sync. */
enum {
    kOptHelpH = 0,
    kOptHelpQuestionMark,
    kOptDestDir,
    kOptName,
    kOptEntryPoint,
#ifdef CAN_GENERATE_OBJECTS
    kOptObject,
    kOptMatchArch,
#endif
    kOptFilename,
    kOptAssembly
};

static UOption options[]={
/*0*/UOPTION_HELP_H,
     UOPTION_HELP_QUESTION_MARK,
     UOPTION_DESTDIR,
     UOPTION_DEF("name", 'n', UOPT_REQUIRES_ARG),
     UOPTION_DEF("entrypoint", 'e', UOPT_REQUIRES_ARG),
#ifdef CAN_GENERATE_OBJECTS
/*5*/UOPTION_DEF("object", 'o', UOPT_NO_ARG),
     UOPTION_DEF("match-arch", 'm', UOPT_REQUIRES_ARG),
#endif
     UOPTION_DEF("filename", 'f', UOPT_REQUIRES_ARG),
     UOPTION_DEF("assembly", 'a', UOPT_REQUIRES_ARG)
};

/* Output-mode tags stored in writeCode to select the writer in the loop below. */
#define CALL_WRITECCODE     'c'
#define CALL_WRITEASSEMBLY  'a'
#define CALL_WRITEOBJECT    'o'

extern int
main(int argc, char* argv[]) {
    UBool verbose = TRUE;
    char writeCode;

    U_MAIN_INIT_ARGS(argc, argv);

    /* Default output directory is the current directory. */
    options[kOptDestDir].value = ".";

    /* read command line options */
    argc=u_parseArgs(argc, argv, sizeof(options)/sizeof(options[0]), options);

    /* error handling, printing usage message */
    if(argc<0) {
        /* u_parseArgs returns the negated index of the offending argument. */
        fprintf(stderr,
            "error in command line argument \"%s\"\n",
            argv[-argc]);
    }
    if(argc<0 || options[kOptHelpH].doesOccur || options[kOptHelpQuestionMark].doesOccur) {
        fprintf(stderr,
            "usage: %s [-options] filename1 filename2 ...\n"
            "\tread each binary input file and \n"
            "\tcreate a .c file with a byte array that contains the input file's data\n"
            "options:\n"
            "\t-h or -? or --help  this usage text\n"
            "\t-d or --destdir     destination directory, followed by the path\n"
            "\t-n or --name        symbol prefix, followed by the prefix\n"
            "\t-e or --entrypoint  entry point name, followed by the name (_dat will be appended)\n"
            "\t-r or --revision    Specify a version\n"
            , argv[0]);
#ifdef CAN_GENERATE_OBJECTS
        fprintf(stderr,
            "\t-o or --object      write a .obj file instead of .c\n"
            "\t-m or --match-arch file.o  match the architecture (CPU, 32/64 bits) of the specified .o\n"
            "\t                    ELF format defaults to i386. Windows defaults to the native platform.\n");
#endif
        fprintf(stderr,
            "\t-f or --filename    Specify an alternate base filename. (default: symbolname_typ)\n"
            "\t-a or --assembly    Create assembly file. (possible values are: ");
        /* Finishes the -a help line with the list of known assembly header types. */
        printAssemblyHeadersToStdErr();
    } else {
        const char *message, *filename;
        /* TODO: remove void (*writeCode)(const char *, const char *); */

        /* Pick the output mode; -a and (optionally) -o override the C default. */
        if(options[kOptAssembly].doesOccur) {
            message="generating assembly code for %s\n";
            writeCode = CALL_WRITEASSEMBLY;
            /* TODO: remove  writeCode=&writeAssemblyCode; */

            if (!checkAssemblyHeaderName(options[kOptAssembly].value)) {
                fprintf(stderr,
                    "Assembly type \"%s\" is unknown.\n", options[kOptAssembly].value);
                return -1;
            }
        }
#ifdef CAN_GENERATE_OBJECTS
        else if(options[kOptObject].doesOccur) {
            message="generating object code for %s\n";
            writeCode = CALL_WRITEOBJECT;
            /* TODO: remove  writeCode=&writeObjectCode; */
        }
#endif
        else
        {
            message="generating C code for %s\n";
            writeCode = CALL_WRITECCODE;
            /* TODO: remove  writeCode=&writeCCode; */
        }

        /* Process the input files right-to-left as left by u_parseArgs. */
        while(--argc) {
            filename=getLongPathname(argv[argc]);
            if (verbose) {
                fprintf(stdout, message, filename);
            }

            switch (writeCode) {
            case CALL_WRITECCODE:
                writeCCode(filename, options[kOptDestDir].value,
                           options[kOptName].doesOccur ? options[kOptName].value : NULL,
                           options[kOptFilename].doesOccur ? options[kOptFilename].value : NULL,
                           NULL);
                break;
            case CALL_WRITEASSEMBLY:
                writeAssemblyCode(filename, options[kOptDestDir].value,
                                  options[kOptEntryPoint].doesOccur ? options[kOptEntryPoint].value : NULL,
                                  options[kOptFilename].doesOccur ? options[kOptFilename].value : NULL,
                                  NULL);
                break;
#ifdef CAN_GENERATE_OBJECTS
            case CALL_WRITEOBJECT:
                writeObjectCode(filename, options[kOptDestDir].value,
                                options[kOptEntryPoint].doesOccur ? options[kOptEntryPoint].value : NULL,
                                options[kOptMatchArch].doesOccur ? options[kOptMatchArch].value : NULL,
                                options[kOptFilename].doesOccur ? options[kOptFilename].value : NULL,
                                NULL);
                break;
#endif
            default:
                /* Should never occur. */
                break;
            }
            /* TODO: remove  writeCode(filename, options[kOptDestDir].value); */
        }
    }

    return 0;
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;

using CoreFXTestLibrary;

// ---------------------------------------------------------------------------
// Fixture types for the Marshal.OffsetOf / Marshal.SizeOf tests below.
// Each type is laid out so that the expected offsets asserted in OffsetTest
// follow from the declared LayoutKind and field sizes.
// ---------------------------------------------------------------------------

// Default (sequential) layout value type; exercises OffsetOf on a private field.
public struct someStruct
{
    public bool p;
    private int var;
}

// Explicit-layout class mirroring the native Win32 SYSTEMTIME structure
// (eight consecutive 16-bit fields).
[StructLayout(LayoutKind.Explicit)]
public class MySystemTime
{
    [FieldOffset(0)]
    public ushort wYear;
    [FieldOffset(2)]
    public ushort wMonth;
    [FieldOffset(4)]
    public ushort wDayOfWeek;
    [FieldOffset(6)]
    public ushort wDay;
    [FieldOffset(8)]
    public ushort wHour;
    [FieldOffset(10)]
    public ushort wMinute;
    [FieldOffset(12)]
    public ushort wSecond;
    [FieldOffset(14)]
    public ushort wMilliseconds;
}

// Sequential-layout reference type: x at offset 0, y at offset 4.
[StructLayout(LayoutKind.Sequential)]
public class MyPoint
{
    public int x;
    public int y;
}

// No StructLayout attribute: OffsetOf on this type must throw ArgumentException.
public class NoLayoutPoint
{
    public int x;
    public int y;
}

// Deliberately has no field named "NonExistField"; used to verify that
// OffsetOf throws ArgumentException for a missing field name.
[StructLayout(LayoutKind.Sequential)]
public class NonExistField
{
}

// Explicit layout with several overlapping "union" regions; total size 56.
[StructLayout(LayoutKind.Explicit)]
internal struct ExplicitLayoutTest
{
    [FieldOffset(0)]
    public short m_short1; // 2 bytes
    [FieldOffset(2)]
    public short m_short2; // 2 bytes

    [FieldOffset(4)]
    public byte union1_byte1; // 1 byte
    [FieldOffset(5)]
    public byte union1_byte2; // 1 byte
    [FieldOffset(6)]
    public short union1_short1; // 2 bytes
    [FieldOffset(8)]
    public Int32 union1_int1; // 4 bytes
    [FieldOffset(12)]
    public Int32 union1_int2; // 4 bytes
    [FieldOffset(16)]
    public double union1_double1; // 8 bytes

    // union2 overlaps union1 starting at offset 4.
    [FieldOffset(4)]
    public ushort union2_ushort1; // 2 bytes
    [FieldOffset(6)]
    public ushort union2_ushort2; // 2 bytes

    // union3 overlaps union1 starting at offset 8.
    [FieldOffset(8)]
    public Int32 union3_int1; // 4 bytes
    [FieldOffset(8)]
    public decimal union3_decimal1; // 16 bytes

    [FieldOffset(24)]
    public ushort m_ushort1; // 2 bytes
    // 6 bytes of padding
    [FieldOffset(32)]
    public decimal m_decimal1; // 16 bytes
    [FieldOffset(48)]
    public char m_char1; // 1 byte
    // 7 bytes of padding
}

// Sequential (default) layout; the comments below spell out the marshalled
// native layout that TestFieldAlignment asserts (total size 80).
internal struct FieldAlignementTest
{
    public byte m_byte1; // 1 byte
    // 1 byte of padding
    public short m_short1; // 2 bytes
    public short m_short2; // 2 bytes
    // 2 bytes of padding
    public Int32 m_int1; // 4 bytes
    public byte m_byte2; // 1 byte
    // 3 bytes of padding
    public Int32 m_int2; // 4 bytes
    // 4 bytes of padding
    public double m_double1; // 8 bytes
    public char m_char1; // 1 byte
    public char m_char2; // 1 byte
    public char m_char3; // 1 byte
    // 5 bytes of padding
    public double m_double2; // 8 bytes
    public byte m_byte3; // 1 byte
    public byte m_byte4; // 1 byte
    // 6 bytes of padding
    public decimal m_decimal1; // 16 bytes
    public char m_char4; // 1 byte
    // 7 bytes of padding
}

struct FieldAlignementTest_Decimal
{
    public byte b; // 1 byte
    // 7 bytes of padding

    // The largest field in below struct is decimal (16 bytes wide).
    // However, alignment requirement for the below struct should be only 8 bytes (not 16).
    // This is because unlike fields of other types well known to mcg (like long, char etc.)
    // which need to be aligned according to their byte size, decimal is really a struct
    // with 8 byte alignment requirement.
    public FieldAlignementTest p; // 80 bytes

    public short s; // 2 bytes
    // 6 bytes of padding
}

struct FieldAlignementTest_Guid
{
    public byte b; // 1 byte
    // 3 bytes of padding

    // Guid is really a struct with 4 byte alignment requirement (which is less than its byte size of 16 bytes).
    public Guid g; // 16 bytes

    public short s; // 2 bytes
    // 2 bytes of padding
}

struct FieldAlignementTest_Variant
{
    public byte b; // 1 byte
    // 7 bytes of padding

    // Using [MarshalAs(UnmanagedType.Struct)] means that the Variant type will be used for field 'v' on native side.
    // Variant is really a struct with 8 byte alignment requirement (which is less than its byte size of 24 / 16 bytes).
    [MarshalAs(UnmanagedType.Struct)]
    public object v; // 16 bytes on 32-bit, 24 bytes on 64-bit

    public short s; // 2 bytes
    // 6 bytes of padding
};

/// <summary>
/// Tests for Marshal.OffsetOf (and the related Marshal.SizeOf) covering
/// argument validation, explicit and sequential layouts, and the alignment
/// rules for decimal, Guid and Variant-marshalled fields.
/// </summary>
public class OffsetTest
{
    /// <summary>Null type and/or null field name must throw ArgumentNullException.</summary>
    public static void NullParameter()
    {
        Assert.Throws<ArgumentNullException>(() => Marshal.OffsetOf(null, null));
        Assert.Throws<ArgumentNullException>(() => Marshal.OffsetOf(new object().GetType(), null));
        Assert.Throws<ArgumentNullException>(() => Marshal.OffsetOf(null, "abcd"));
    }

    /// <summary>A field name that does not exist on the type must throw ArgumentException.</summary>
    public static void NonExistField()
    {
        Assert.Throws<ArgumentException>(() => Marshal.OffsetOf(typeof(NonExistField), "NonExistField"));
    }

    /// <summary>A type without a StructLayout attribute cannot be marshalled; expect ArgumentException.</summary>
    public static void NoLayoutClass()
    {
        Assert.Throws<ArgumentException>(() => Marshal.OffsetOf(typeof(NoLayoutPoint), "x"));
    }

    /// <summary>OffsetOf works on a private struct field.</summary>
    public static void StructField()
    {
        Assert.AreEqual(new IntPtr(4), Marshal.OffsetOf(typeof(someStruct), "var"));
    }

    /// <summary>Spot-checks offsets of an explicit-layout class (SYSTEMTIME shape).</summary>
    public static void ClassExplicitField()
    {
        Assert.AreEqual(new IntPtr(0), Marshal.OffsetOf(typeof(MySystemTime), "wYear"));
        Assert.AreEqual(new IntPtr(8), Marshal.OffsetOf(typeof(MySystemTime), "wHour"));
        Assert.AreEqual(new IntPtr(14), Marshal.OffsetOf(typeof(MySystemTime), "wMilliseconds"));
    }

    /// <summary>Offsets of a sequential-layout class follow declaration order.</summary>
    public static void ClassSequentialField()
    {
        Assert.AreEqual(new IntPtr(0), Marshal.OffsetOf(typeof(MyPoint), "x"));
        Assert.AreEqual(new IntPtr(4), Marshal.OffsetOf(typeof(MyPoint), "y"));
    }

    /// <summary>WinRT projected types; disabled pending BUG 1212387.</summary>
    public static void ProjectedType()
    {
#if BUG_1212387
        Assert.AreEqual(new IntPtr(0), Marshal.OffsetOf(typeof(Windows.Foundation.Point), "_x"));
        Assert.AreEqual(new IntPtr(1), Marshal.OffsetOf(typeof(Windows.UI.Color), "_R"));
#endif
    }

    /// <summary>Verifies every field offset (including overlapping unions) of ExplicitLayoutTest.</summary>
    public static void TestExplicitLayout()
    {
        var t = typeof(ExplicitLayoutTest);
        Assert.AreEqual(56, Marshal.SizeOf(t));
        Assert.AreEqual(new IntPtr(0), Marshal.OffsetOf(t, "m_short1"));
        Assert.AreEqual(new IntPtr(2), Marshal.OffsetOf(t, "m_short2"));
        Assert.AreEqual(new IntPtr(4), Marshal.OffsetOf(t, "union1_byte1"));
        Assert.AreEqual(new IntPtr(5), Marshal.OffsetOf(t, "union1_byte2"));
        Assert.AreEqual(new IntPtr(6), Marshal.OffsetOf(t, "union1_short1"));
        Assert.AreEqual(new IntPtr(8), Marshal.OffsetOf(t, "union1_int1"));
        Assert.AreEqual(new IntPtr(12), Marshal.OffsetOf(t, "union1_int2"));
        Assert.AreEqual(new IntPtr(16), Marshal.OffsetOf(t, "union1_double1"));
        Assert.AreEqual(new IntPtr(4), Marshal.OffsetOf(t, "union2_ushort1"));
        Assert.AreEqual(new IntPtr(6), Marshal.OffsetOf(t, "union2_ushort2"));
        Assert.AreEqual(new IntPtr(8), Marshal.OffsetOf(t, "union3_int1"));
        Assert.AreEqual(new IntPtr(8), Marshal.OffsetOf(t, "union3_decimal1"));
        Assert.AreEqual(new IntPtr(24), Marshal.OffsetOf(t, "m_ushort1"));
        Assert.AreEqual(new IntPtr(32), Marshal.OffsetOf(t, "m_decimal1"));
        Assert.AreEqual(new IntPtr(48), Marshal.OffsetOf(t, "m_char1"));
    }

    /// <summary>Verifies natural-alignment padding of a sequential struct (see field comments).</summary>
    public static void TestFieldAlignment()
    {
        var t = typeof(FieldAlignementTest);
        Assert.AreEqual(80, Marshal.SizeOf(t));
        Assert.AreEqual(new IntPtr(0), Marshal.OffsetOf(t, "m_byte1"));
        Assert.AreEqual(new IntPtr(2), Marshal.OffsetOf(t, "m_short1"));
        Assert.AreEqual(new IntPtr(4), Marshal.OffsetOf(t, "m_short2"));
        Assert.AreEqual(new IntPtr(8), Marshal.OffsetOf(t, "m_int1"));
        Assert.AreEqual(new IntPtr(12), Marshal.OffsetOf(t, "m_byte2"));
        Assert.AreEqual(new IntPtr(16), Marshal.OffsetOf(t, "m_int2"));
        Assert.AreEqual(new IntPtr(24), Marshal.OffsetOf(t, "m_double1"));
        Assert.AreEqual(new IntPtr(32), Marshal.OffsetOf(t, "m_char1"));
        Assert.AreEqual(new IntPtr(33), Marshal.OffsetOf(t, "m_char2"));
        Assert.AreEqual(new IntPtr(34), Marshal.OffsetOf(t, "m_char3"));
        Assert.AreEqual(new IntPtr(40), Marshal.OffsetOf(t, "m_double2"));
        Assert.AreEqual(new IntPtr(48), Marshal.OffsetOf(t, "m_byte3"));
        Assert.AreEqual(new IntPtr(49), Marshal.OffsetOf(t, "m_byte4"));
        Assert.AreEqual(new IntPtr(56), Marshal.OffsetOf(t, "m_decimal1"));
        Assert.AreEqual(new IntPtr(72), Marshal.OffsetOf(t, "m_char4"));
    }

    /// <summary>decimal fields align to 8 bytes (struct alignment), not their 16-byte size.</summary>
    public static void TestFieldAlignment_Decimal()
    {
        var t = typeof(FieldAlignementTest_Decimal);
        Assert.AreEqual(96, Marshal.SizeOf(t));
        Assert.AreEqual(new IntPtr(0), Marshal.OffsetOf(t, "b"));
        Assert.AreEqual(new IntPtr(8), Marshal.OffsetOf(t, "p"));
        Assert.AreEqual(new IntPtr(88), Marshal.OffsetOf(t, "s"));
    }

    /// <summary>Guid fields align to 4 bytes, not their 16-byte size.</summary>
    public static void TestFieldAlignment_Guid()
    {
        var t = typeof(FieldAlignementTest_Guid);
        Assert.AreEqual(24, Marshal.SizeOf(t));
        Assert.AreEqual(new IntPtr(0), Marshal.OffsetOf(t, "b"));
        Assert.AreEqual(new IntPtr(4), Marshal.OffsetOf(t, "g"));
        Assert.AreEqual(new IntPtr(20), Marshal.OffsetOf(t, "s"));
    }

    /// <summary>
    /// Variant-marshalled object fields align to 8 bytes; the Variant's native
    /// size is pointer-size dependent, hence the IntPtr.Size branches.
    /// (Currently not run from Main; see coreclr issue 2075.)
    /// </summary>
    public static void TestFieldAlignment_Variant()
    {
        var t = typeof(FieldAlignementTest_Variant);
        Assert.AreEqual(new IntPtr(0), Marshal.OffsetOf(t, "b"));
        Assert.AreEqual(new IntPtr(8), Marshal.OffsetOf(t, "v"));
        if (IntPtr.Size == 4)
        {
            Assert.AreEqual(new IntPtr(24), Marshal.OffsetOf(t, "s"));
            Assert.AreEqual(32, Marshal.SizeOf(t));
        }
        else if (IntPtr.Size == 8)
        {
            Assert.AreEqual(new IntPtr(32), Marshal.OffsetOf(t, "s"));
            Assert.AreEqual(40, Marshal.SizeOf(t));
        }
        else
        {
            Assert.Fail(string.Format("Unexpected value '{0}' for IntPtr.Size", IntPtr.Size));
        }
    }

    /// <summary>Test entry point; returns 100 (the conventional success code) when all asserts pass.</summary>
    public static int Main(String[] args)
    {
        //https://github.com/dotnet/coreclr/issues/2075
        //TestFieldAlignment_Variant();
        TestFieldAlignment_Guid();
        TestFieldAlignment_Decimal();
        TestFieldAlignment();
        TestExplicitLayout();
        ClassSequentialField();
        NullParameter();
        NonExistField();
        NoLayoutClass();
        StructField();
        ClassExplicitField();
        return 100;
    }
}
package io.netty.example.echo; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.ChannelHandlerAdapter; import io.netty.channel.ChannelHandlerContext; /** * Handler implementation for the echo client. It initiates the ping-pong * traffic between the echo client and server by sending the first message to * the server. */ public class EchoClientHandler extends ChannelHandlerAdapter { private final ByteBuf firstMessage; /** * Creates a client-side handler. */ public EchoClientHandler() { firstMessage = Unpooled.buffer(EchoClient.SIZE); for (int i = 0; i < firstMessage.capacity(); i ++) { firstMessage.writeByte((byte) i); } } @Override public void channelActive(ChannelHandlerContext ctx) { ctx.writeAndFlush(firstMessage); } @Override public void channelRead(ChannelHandlerContext ctx, Object msg) { ctx.write(msg); } @Override public void channelReadComplete(ChannelHandlerContext ctx) { ctx.flush(); } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { // Close the connection when an exception is raised. cause.printStackTrace(); ctx.close(); } }
import { race as staticRace } from '../../observable/race';

// Declaration merging: exposes the static `race` creation function on the
// Observable class as `Observable.race` for type-checking purposes only.
// The actual runtime assignment is performed by the corresponding patch
// module; this file contributes nothing to the emitted JavaScript.
declare module '../../Observable' {
  namespace Observable {
    let race: typeof staticRace;
  }
}
 #pragma once #include <aws/dms/DatabaseMigrationService_EXPORTS.h> #include <aws/core/utils/memory/stl/AWSString.h> namespace Aws { namespace DatabaseMigrationService { namespace Model { enum class MessageFormatValue { NOT_SET, json, json_unformatted }; namespace MessageFormatValueMapper { AWS_DATABASEMIGRATIONSERVICE_API MessageFormatValue GetMessageFormatValueForName(const Aws::String& name); AWS_DATABASEMIGRATIONSERVICE_API Aws::String GetNameForMessageFormatValue(MessageFormatValue value); } // namespace MessageFormatValueMapper } // namespace Model } // namespace DatabaseMigrationService } // namespace Aws
import sys
from sokoban import Sokoban
'''
Tests search algos
Handles command line and user input
'''


def runSearch(s, filename, option):
    '''
    Runs the search based on filename and option selected

    s        -- a Sokoban instance (provides new_board and doSearches)
    filename -- path to a puzzle file loaded via s.new_board
    option   -- integer selecting which search algorithm(s) to run;
                interpretation is delegated to s.doSearches
    '''
    # NOTE(review): Python 2 script (print statements, raw_input below).
    b = s.new_board(filename)
    print '\nSolving ' + filename + '...'
    s.doSearches(b, option)


sok = Sokoban()

# Prompt the user for an algorithm. Options 2, 4 and 5 are commented out,
# presumably because those searches are not implemented yet — TODO confirm.
print "Which algorithm?"
print "1) Breadth first search"
# print "2) Depth first search"
print "3) Uniform cost search"
# print "4) Greedy best first search"
# print "5) A* search"
print "6) all"
p = raw_input("Type a number and press enter: ")
# NOTE(review): int() raises ValueError on non-numeric input; the choice is
# not validated against the menu here, it is passed straight to doSearches.
option = int(p)

# gets file from args and plays that puzzle
if len(sys.argv) == 2:
    runSearch(sok, sys.argv[1], option)
else:
    # No puzzle given on the command line: run the bundled sample puzzles.
    runSearch(sok, 'puzzles/easy1.txt', option)
    runSearch(sok, 'puzzles/easy3.txt', option)
    runSearch(sok, 'puzzles/mod1.txt', option)
layout: lab num: lab10 ready: true desc: "Functions and Visualization" assigned: 2017-08-27 13:15:00.00-7 due: 2017-08-29 16:45:00.00-7 --- ## This lab is optional. If you are interested in data analytics, it is a good lab for you! Welcome to lab 10! We'll practice functions and the table method `apply` from [this ebook description](https://www.inferentialthinking.com/chapters/07/1/applying-a-function-to-a-column.html). We'll also learn about visualization from [a chapter in data8](https://www.inferentialthinking.com/chapters/06/visualization.html). ## First, create a lab10 repo Go to github.com and create a new repo called spis16-lab10-Name-Name using Method 1. When creating the repo import the starter code from this git repo: https://github.com/ucsd-cse-spis-2017/lab10starter Then use `git clone` to clone this into your `~/github` directory. In the repo, you will see that there are two files, world_population.csv and imdb.csv. They are used in this lab for data analysis. ## Step 2: start ipython and get into the pylab mode Run commands in terminal ``` ipython %pylab ``` This is where we will start to use tables to read in and analyze data. First type in ``` import numpy as np from datascience import * import matplotlib import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') ``` This will import the right packages. The matplotlib package will allow us to have graphing capabilities. Now let's start the lab. When turning in the lab, turn in the code that you complete for each of the questions in a python file. Clearly number the answers such as #Q2.1 etc. ## 1. Functions and CEO Incomes Let's start with a real data analysis task. We'll look at the 2015 compensation of CEOs at the 100 largest companies in California. 
The data were compiled for a Los Angeles Times analysis [here](http://spreadsheets.latimes.com/california-ceo-compensation/), and ultimately came from [filings](https://www.sec.gov/answers/proxyhtf.htm) mandated by the SEC from all publicly-traded companies. Two companies have two CEOs, so there are 102 CEOs in the dataset. We've copied the data in raw form from the LA Times page into a file called `raw_compensation.csv`. (The page notes that all dollar amounts are in millions of dollars.) Run the following command in ipython ``` raw_compensation = Table.read_table('raw_compensation.csv') raw_compensation ``` **Question 1.1** We want to compute the average of the CEOs' pay. Try running the statements and **You should see an error** ``` np.average(raw_compensation.column("Total Pay")) ``` Let's examine why this error occurred by looking at the values in the "Total Pay" column. Use the `type` function and set `total_pay_type` to the type of the first value in the "Total Pay" column. ``` total_pay_type = ... total_pay_type ``` **Question 1.2.** You should have found that the values in the "Total Pay" column have type `str`, which means they are not numbers, but just text (strings). It doesn't make sense to take the average of the text values, so we need to convert them to numbers if we want to do this. Extract the first value in the "Total Pay" column. It's Mark Hurd's pay in 2015, in *millions* of dollars. Call it `mark_hurd_pay_string`. ``` mark_hurd_pay_string = ... mark_hurd_pay_string ``` **Question 1.3.** Convert `mark_hurd_pay_string` to a number of *dollars*. The string method `strip` will be useful for removing the dollar sign; it removes a specified character from the start or end of a string. For example, the value of `"100%".strip("%")` is the string `"100"`. You'll also need the function `float`, which converts a string that looks like a number to an actual number. Last, remember that the answer should be in dollars, not millions of dollars. 
``` mark_hurd_pay = ... mark_hurd_pay ``` To compute the average pay, we need to do this for every CEO. But that looks like it would involve copying this code 102 times. This is where functions come in. First, we'll define a new function, giving a name to the expression that converts "total pay" strings to numeric values. Later in this lab we'll see the payoff: we can call that function on every pay string in the dataset at once. **Question 1.4.** Copy the expression you used to compute `mark_hurd_pay` as the `return` expression of the function below, but replace the specific `mark_hurd_pay_string` with the generic `pay_string` name specified in the first line of the `def` statement. ``` def convert_pay_string_to_number(pay_string): """Converts a pay string like '$100' (in millions) to a number of dollars.""" return ... ``` Running that cell doesn't convert any particular pay string. Instead, it creates a function called `convert_pay_string_to_number` that can convert any string with the right format to a number representing millions of dollars. We can call our function just like we call the built-in functions we've seen. It takes one argument, a string, and it returns a number. ``` convert_pay_string_to_number('$42') convert_pay_string_to_number(mark_hurd_pay_string) # We can also compute Safra Catz's pay in the same way: convert_pay_string_to_number(raw_compensation.where("Name", are.containing("Safra")).column("Total Pay").item(0)) ``` What have we gained? Well, without the function, we'd have to copy that `10**6 * float(pay_string.strip("$"))` stuff each time we wanted to convert a pay string. Now we just call a function whose name says exactly what it's doing. Soon, we'll see how to apply this function to every pay string in a single expression. First, let's write some more functions. ## 2. Defining functions Let's write a very simple function that converts a proportion to a percentage by multiplying it by 100. 
For example, the value of `to_percentage(.5)` should be the number 50. (No percent sign.) A function definition has a few parts. ##### `def` It always starts with `def` (short for **def**ine): def ##### Name Next comes the name of the function. Let's call our function `to_percentage`. def to_percentage ##### Signature Next comes something called the *signature* of the function. This tells Python how many arguments your function should have, and what names you'll use to refer to those arguments in the function's code. `to_percentage` should take one argument, and we'll call that argument `proportion` since it should be a proportion. def to_percentage(proportion) We put a colon after the signature to tell Python it's over. def to_percentage(proportion): ##### Documentation Functions can do complicated things, so you should write an explanation of what your function does. For small functions, this is less important, but it's a good habit to learn from the start. Conventionally, Python functions are documented by writing a triple-quoted string: def to_percentage(proportion): """Converts a proportion to a percentage.""" ##### Body Now we start writing code that runs when the function is called. This is called the *body* of the function. We can write anything we could write anywhere else. First let's give a name to the number we multiply a proportion by to get a percentage. def to_percentage(proportion): """Converts a proportion to a percentage.""" factor = 100 ##### `return` The special instruction `return` in a function's body tells Python to make the value of the function call equal to whatever comes right after `return`. We want the value of `to_percentage(.5)` to be the proportion .5 times the factor 100, so we write: def to_percentage(proportion): """Converts a proportion to a percentage.""" factor = 100 return proportion * factor **Question 2.1.** Define `to_percentage` in the cell below. Call your function to convert the proportion .2 to a percentage. 
Name that percentage `twenty_percent`. ``` def ... """ ... """ ... = ... return ... twenty_percent = ... twenty_percent ``` Like the built-in functions, you can use named values as arguments to your function. **Question 2.2.** Use `to_percentage` again to convert the proportion named `a_proportion` (defined below) to a percentage called `a_percentage`. *Note:* You don't need to define `to_percentage` again! Just like other named things, functions stick around after you define them. ``` a_proportion = 2**(.5) / 2 a_percentage = ... a_percentage ``` Here's something important about functions: the names assigned within a function body are only accessible within the function body. Once the function has returned, those names are gone. So even though you defined `factor = 100` inside `to_percentage` above and then called `to_percentage`, you cannot refer to `factor` anywhere except inside the body of `to_percentage`: **Question 2.3.** Define a function called `disemvowel`. It should take a single string as its argument. (You can call that argument whatever you want.) It should return a copy of that string, but with all the characters that are vowels removed. (In English, the vowels are the characters "a", "e", "i", "o", and "u".) *Hint:* To remove all the "a"s from a string, you can use `that_string.replace("a", "")`. And you can call `replace` multiple times. ``` def disemvowel(a_string): ... ... # An example call to your function. (It's often helpful to run # an example call from time to time while you're writing a function, # to see how it currently works.) disemvowel("Can you read this without vowels?") ``` ##### Calls on calls on calls Just as you write a series of lines to build up a complex computation, it's useful to define a series of small functions that build on each other. Since you can write any code inside a function's body, you can call other functions you've written. 
If a function is like a recipe, defining a function in terms of other functions is like having a recipe for cake telling you to follow another recipe to make the frosting, and another to make the sprinkles. This makes the cake recipe shorter and clearer, and it avoids having a bunch of duplicated frosting recipes. It's a foundation of productive programming. For example, suppose you want to count the number of characters *that aren't vowels* in a piece of text. One way to do that is to remove all the vowels and count the size of the remaining string. **Question 2.4.** Write a function called `num_non_vowels`. It should take a string as its argument and return a number. The number should be the number of characters in the argument string that aren't vowels. *Hint:* The function `len` takes a string as its argument and returns the number of characters in it. ``` def num_non_vowels(a_string): """The number of characters in a string, minus the vowels.""" ... ``` Functions can also encapsulate code that *does things* rather than just computing values. For example, if you call `print` inside a function, and then call that function, something will get printed. The `movies_by_year` dataset has information about movie sales in recent years. You can read it in and show the first 10 rows by doing ``` movies_by_year = Table.read_table("movies_by_year.csv") movies_by_year ``` Suppose you'd like to display the year with the 5th-highest total gross movie sales, printed in a human-readable way. You might do this: ``` rank = 5 fifth_from_top_movie_year = movies_by_year.sort("Total Gross", descending=True).column("Year").item(rank-1) print("Year number", rank, "for total gross movie sales was:", fifth_from_top_movie_year) ``` After writing this, you realize you also wanted to print out the 2nd and 3rd-highest years. Instead of copying your code, you decide to put it in a function. Since the rank varies, you make that an argument to your function. 
**Question 2.5.** Write a function called `print_kth_top_movie_year`. It should take a single argument, the rank of the year (like 2, 3, or 5 in the above examples). It should print out a message like the one above. It shouldn't have a `return` statement. ``` def print_kth_top_movie_year(k): # Our solution used 2 lines. ... ... # Example calls to your function: print_kth_top_movie_year(2) print_kth_top_movie_year(3) ``` ## 3. `apply`ing functions Defining a function is a lot like giving a name to a value with `=`. In fact, a function is a value just like the number 1 or the text "the"! For example, we can make a new name for the built-in function `max` if we want: ``` our_name_for_max = max our_name_for_max(2, 6) ``` The old name for `max` is still around: ``` max(2, 6) ``` Try just writing `max` or `our_name_for_max` (or the name of any other function) in a cell, and run that cell. Python will print out a (very brief) description of the function. ``` max ``` Why is this useful? Since functions are just values, it's possible to pass them as arguments to other functions. Here's a simple but not-so-practical example: we can make an array of functions. ``` make_array(max, np.average, are.equal_to) ``` **Question 3.1.** Make an array containing any 3 other functions you've seen. Call it `some_functions`. ``` some_functions = ... some_functions ``` Working with functions as values can lead to some funny-looking code. For example, see if you can figure out why this works: ``` make_array(max, np.average, are.equal_to).item(0)(4, -2, 7) ``` Here's a simpler example that's actually useful: the table method `apply`. `apply` calls a function many times, once on *each* element in a column of a table. It produces an array of the results. 
Here we use `apply` to convert every CEO's pay to a number, using the function you defined: ``` raw_compensation.apply(convert_pay_string_to_number, "Total Pay") ``` Here's an illustration of what that did: <img src="apply.png"/> Note that we didn't write something like `convert_pay_string_to_number()` or `convert_pay_string_to_number("Total Pay")`. The job of `apply` is to call the function we give it, so instead of calling `convert_pay_string_to_number` ourselves, we just write its name as an argument to `apply`. **Question 3.2.** Using `apply`, make a table that's a copy of `raw_compensation` with one more column called "Total Pay (\$)". It should be the result of applying `convert_pay_string_to_number` to the "Total Pay" column, as we did above. Call the new table `compensation`. ``` compensation = raw_compensation.with_column( "Total Pay ($)", ... compensation ``` Now that we have the pay in numbers, we can compute things about them. **Question 3.3.** Compute the average total pay of the CEOs in the dataset. ``` average_total_pay = average_total_pay ``` ## 4. Histograms Earlier, we computed the average pay among the CEOs in our 102-CEO dataset. The average doesn't tell us everything about the amounts CEOs are paid, though. Maybe just a few CEOs make the bulk of the money, even among these 102. We can use a *histogram* to display more information about a set of numbers. The table method `hist` takes a single argument, the name of a column of numbers. It produces a histogram of the numbers in that column. **Question 4.1.** Make a histogram of the pay of the CEOs in `compensation`. Hint: type help(compensation.hist) to know more about drawing histograms. Can you count number of CEOs who make more than 30 million a year? ``` ... ``` **Question 4.2.** Looking at the histogram, how many CEOs made more than \$30 million? (Answer the question with code. *Hint:* Use the table method `where` and the property `num_rows`.) ``` num_ceos_more_than_30_million = ... 
``` Great job! :D You're finished with lab 10!
package org.dcm4che.tools.printscu; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.awt.Container; import java.awt.Dimension; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.StringWriter; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.dcm4che.client.AssociationRequestor; import org.dcm4che.client.PrintSCU; import org.dcm4che.data.Dataset; import org.dcm4che.data.DcmObjectFactory; import org.dcm4che.data.DcmParser; import org.dcm4che.data.DcmParserFactory; import org.dcm4che.dict.Tags; import org.dcm4che.net.DcmServiceException; import org.dcm4che.util.UIDGenerator; public class PrintSCUFrame extends JFrame { public static final String DEFAULT_PROPERTIES_FILE = "PrintSCU.properties"; private static final int DEF_WIDTH = 600, DEF_HEIGHT = 500; private final Logger log = Logger.getLogger(PrintSCUFrame.class); private AssociationRequestor assocRq = new AssociationRequestor(); private PrintSCU printSCU; private String curPLutUid; private int nextImageBoxIndex; private int nextAnnIndex; private boolean colorMode = false; private boolean applySeparatePresState = false; private Action actConnect, actRelease, actCreateFilmSession, actDeleteFilmSession, actCreateFilmBox, actDeleteFilmBox, actCreateImageBox, actCreatePlut, actCreateAnnotation, actDeletePlut, actPrintFilmSession, actPrintFilmBox, 
actExit; private File lastFile = null; //for JFileChooser to remember last dir private JFileChooser chooser = new JFileChooser(); private DcmObjectFactory dcmFactory = DcmObjectFactory.getInstance(); private UIDGenerator uidGen = UIDGenerator.getInstance(); private JSplitPane panel; private JPanel btnPanel; private PropertiesPanel propPanel; public static final class PrintSCUConfigurationException extends RuntimeException { PrintSCUConfigurationException() { super(); } PrintSCUConfigurationException(String msg) { super(msg); } } PrintSCUFrame() { Container contentPane = this.getContentPane(); btnPanel = new JPanel(); btnPanel.setLayout(new GridLayout(2, 3)); propPanel = new PropertiesPanel(this, DEFAULT_PROPERTIES_FILE); JScrollPane scrollingPanel = new JScrollPane(propPanel); contentPane.add(panel = new JSplitPane( JSplitPane.VERTICAL_SPLIT, btnPanel, scrollingPanel)); btnPanel.setMinimumSize(new Dimension(DEF_WIDTH, DEF_HEIGHT/4)); propPanel.setMinimumSize(new Dimension(DEF_WIDTH, DEF_HEIGHT/8)); //Main Menus JMenuBar mnubar = new JMenuBar(); setJMenuBar(mnubar); JMenu mnuFile = new JMenu("File"); mnubar.add(mnuFile); // File menu actExit = new AbstractAction() { public void actionPerformed(ActionEvent e) { System.exit(0); } }; actExit.putValue(Action.NAME,"Exit"); JMenuItem mnuExit = new JMenuItem(actExit); mnuFile.add(mnuExit); //set size setSize(new Dimension(DEF_WIDTH, DEF_HEIGHT)); //Print SCP related actions //Connect actConnect = new AbstractAction() { public void actionPerformed(ActionEvent e) { //connect Integer anInt; String aString; try { if ((anInt = getIntegerFromProperty("MaxPduSize")) != null) assocRq.setMaxPDULength(anInt.intValue()); if ((aString = getStringFromProperty("CallingAET")) == null) throw new PrintSCUConfigurationException(); assocRq.setCallingAET(aString); if ((aString = getStringFromProperty("CalledAET")) == null) throw new PrintSCUConfigurationException(); assocRq.setCalledAET(aString); if ((aString = getStringFromProperty("Host")) 
== null) throw new PrintSCUConfigurationException(); assocRq.setHost(aString); if ((anInt = getIntegerFromProperty("Port")) == null) throw new PrintSCUConfigurationException(); assocRq.setPort(anInt.intValue()); } catch (PrintSCUConfigurationException e1) { JOptionPane.showMessageDialog(PrintSCUFrame.this, e1); } printSCU = new PrintSCU(assocRq); printSCU.setAutoRefPLUT(true); //always create P-LUT when Film Box is created printSCU.setCreateRQwithIUID(true); printSCU.setNegotiatePLUT(true); printSCU.setNegotiateAnnotation(true); printSCU.setNegotiateColorPrint(colorMode); printSCU.setNegotiateGrayscalePrint(!colorMode); curPLutUid = new String(); try { assocRq.connect(); } catch (IOException e1) { e1.printStackTrace(); return; } setEnabled(false); actCreateFilmSession.setEnabled(true); actRelease.setEnabled(true); } }; actConnect.putValue(Action.NAME, "Connect"); //Release actRelease = new AbstractAction() { public void actionPerformed(ActionEvent e) { //release try { assocRq.release(); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } printSCU = null; onDisconnect(); actConnect.setEnabled(true); } }; actRelease.putValue(Action.NAME, "Release"); //Create Session actCreateFilmSession = new AbstractAction() { public void actionPerformed(ActionEvent e) { Dataset attr = dcmFactory.newDataset(); String prop; Integer propInt; if ((propInt = getIntegerFromProperty("Session.NumberOfCopies")) != null) attr.putIS(Tags.NumberOfCopies, propInt.intValue()); if ((prop = getStringFromProperty("Session.PrintPriority")) != null) attr.putCS(Tags.PrintPriority, prop); if ((prop = getStringFromProperty("Session.MediumType")) != null) attr.putCS(Tags.MediumType, prop); if ((prop = getStringFromProperty("Session.FilmDestination")) != null) attr.putCS(Tags.FilmDestination, prop); if ((prop = getStringFromProperty("Session.FilmSessionLabel")) != 
null) attr.putLO(Tags.FilmSessionLabel, prop); if ((propInt = getIntegerFromProperty("Session.MemoryAllocation")) != null) attr.putIS(Tags.MemoryAllocation, propInt.intValue()); if ((prop = getStringFromProperty("Session.OwnerID")) != null) attr.putSH(Tags.OwnerID, prop); //dump to log dump(attr, "Film Session"); try { printSCU.createFilmSession(attr, colorMode); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } actCreateFilmBox.setEnabled(true); actCreatePlut.setEnabled(true); setEnabled(false); actDeleteFilmSession.setEnabled(true); } }; actCreateFilmSession.putValue(Action.NAME, "Create FilmSession"); //Create FilmBox actCreateFilmBox = new AbstractAction() { public void actionPerformed(ActionEvent e) { Dataset attr = dcmFactory.newDataset(); String prop; Integer propInt; if ((prop = getStringFromProperty("FilmBox.ImageDisplayFormat")) != null) attr.putST(Tags.ImageDisplayFormat, prop); if ((prop = getStringFromProperty("FilmBox.FilmOrientation")) != null) attr.putCS(Tags.FilmOrientation, prop); if ((prop = getStringFromProperty("FilmBox.FilmSizeID")) != null) attr.putCS(Tags.FilmSizeID, prop); if ((prop = getStringFromProperty("FilmBox.RequestedResolutionID")) != null) attr.putCS(Tags.RequestedResolutionID, prop); if ((prop = getStringFromProperty("FilmBox.AnnotationDisplayFormatID")) != null) attr.putCS(Tags.AnnotationDisplayFormatID, prop); if ((prop = getStringFromProperty("FilmBox.MagnificationType")) != null) attr.putCS(Tags.MagnificationType, prop); if ((prop = getStringFromProperty("FilmBox.SmoothingType")) != null) attr.putCS(Tags.SmoothingType, prop); if ((prop = getStringFromProperty("FilmBox.BorderDensity")) != null) attr.putCS(Tags.BorderDensity, prop); if ((prop = 
getStringFromProperty("FilmBox.EmptyImageDensity")) != null) attr.putCS(Tags.EmptyImageDensity, prop); if ((propInt = getIntegerFromProperty("FilmBox.MinDensity")) != null) attr.putUS(Tags.MinDensity, propInt.intValue()); if ((propInt = getIntegerFromProperty("FilmBox.MaxDensity")) != null) attr.putUS(Tags.MaxDensity, propInt.intValue()); if ((prop = getStringFromProperty("FilmBox.Trim")) != null) attr.putCS(Tags.Trim, prop); if ((prop = getStringFromProperty("FilmBox.ConfigurationInformation")) != null) attr.putST(Tags.ConfigurationInformation, prop); if ((propInt = getIntegerFromProperty("FilmBox.Illumination")) != null) attr.putUS(Tags.Illumination, propInt.intValue()); if ((propInt = getIntegerFromProperty("FilmBox.ReflectedAmbientLight")) != null) attr.putUS(Tags.ReflectedAmbientLight, propInt.intValue()); //dump to log dump(attr, "Film Box"); try { printSCU.createFilmBox(attr); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } nextImageBoxIndex = 0; nextAnnIndex = 0; actCreateImageBox.setEnabled(true); setEnabled(false); actDeleteFilmBox.setEnabled(true); actCreatePlut.setEnabled(false); actDeletePlut.setEnabled(false); actCreateAnnotation.setEnabled(true); } }; actCreateFilmBox.putValue(Action.NAME, "Create FilmBox"); //Create ImageBox actCreateImageBox = new AbstractAction() { public void actionPerformed(ActionEvent e) { File file, psFile = null; if (chooser.showOpenDialog(PrintSCUFrame.this) == JFileChooser.APPROVE_OPTION) { file = chooser.getSelectedFile(); Dataset attr = dcmFactory.newDataset(); String prop; Integer propInt; String configInfo; if ((prop = getStringFromProperty("FilmBox.Polarity")) != null) attr.putCS(Tags.Polarity, prop); if ((prop = getStringFromProperty("FilmBox.MagnificationType")) != null) 
attr.putCS(Tags.MagnificationType, prop); if ((prop = getStringFromProperty("FilmBox.SmoothingType")) != null) attr.putCS(Tags.SmoothingType, prop); if ((propInt = getIntegerFromProperty("FilmBox.MinDensity")) != null) attr.putUS(Tags.MinDensity, propInt.intValue()); if ((propInt = getIntegerFromProperty("FilmBox.MaxDensity")) != null) attr.putUS(Tags.MaxDensity, propInt.intValue()); if ((prop = getStringFromProperty("FilmBox.RequestedDecimateCropBehavior")) != null) attr.putCS(Tags.RequestedDecimateCropBehavior, prop); if ((prop = getStringFromProperty("FilmBox.RequestedImageSize")) != null) attr.putDS(Tags.RequestedImageSize, prop); configInfo = getStringFromProperty("FilmBox.ConfigurationInformation"); try { if (curPLutUid == null) { if ((prop = getStringFromProperty("LUT.Gamma")) != null) { if (configInfo == null) configInfo = "gamma=" + prop; else configInfo = configInfo + "\\gamma=" + prop; } else if ((prop = getStringFromProperty("LUT.Shape")) != null) { curPLutUid = printSCU.createPLUT(prop); } else throw new PrintSCUConfigurationException( "You need to either create a P-LUT, set LUT.Shape, or LUT.Gamma"); } //finally write config info (with the plut gamma placed, if it exists) if (configInfo != null) attr.putST(Tags.ConfigurationInformation, configInfo); //dump to log dump(attr, "Image Box"); //create image box Boolean burnInOverlays = getBooleanFromProperty("User.BurnInOverlays"), autoScale = getBooleanFromProperty("User.AutoScale"); if (applySeparatePresState && chooser.showOpenDialog(PrintSCUFrame.this) == JFileChooser.APPROVE_OPTION) { psFile = chooser.getSelectedFile(); } printSCU.setImageBox(nextImageBoxIndex++, file, psFile, attr, (burnInOverlays != null) ? burnInOverlays.booleanValue() : false, (autoScale != null) ? 
autoScale.booleanValue() : true); } catch (PrintSCUConfigurationException e1) { JOptionPane.showMessageDialog(PrintSCUFrame.this, e1); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } actPrintFilmSession.setEnabled(true); actPrintFilmBox.setEnabled(true); if (nextImageBoxIndex >= printSCU.countImageBoxes()) setEnabled(false); } } }; actCreateImageBox.putValue(Action.NAME, "Add ImageBox"); //Create Annotation actCreateAnnotation = new AbstractAction() { public void actionPerformed(ActionEvent e) { Dataset attr = dcmFactory.newDataset(); String text = (String)JOptionPane.showInputDialog(PrintSCUFrame.this, "Enter annotation text:", "Text" + (nextAnnIndex + 1)); if (text == null) return; try { printSCU.setAnnotationBox(nextAnnIndex++, text); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } if (nextAnnIndex >= printSCU.countAnnotationBoxes()) setEnabled(false); } }; actCreateAnnotation.putValue(Action.NAME, "Add Annotation"); //Create P-LUT actCreatePlut = new AbstractAction() { public void actionPerformed(ActionEvent e) { String shape; Dataset ds = dcmFactory.newDataset(); if (chooser.showOpenDialog(PrintSCUFrame.this) != JFileChooser.APPROVE_OPTION) return; File file = chooser.getSelectedFile(); try { DcmParser parser = DcmParserFactory.getInstance().newDcmParser( new BufferedInputStream(new FileInputStream(file))); parser.setDcmHandler(ds.getDcmHandler()); parser.parseDcmFile(null, -1); if (ds.vm(Tags.PresentationLUTSeq) == -1) throw new IOException(); } catch (FileNotFoundException e1) { 
JOptionPane.showMessageDialog(PrintSCUFrame.this, "Could not open file: " + file); return; } catch (IOException e1) { JOptionPane.showMessageDialog(PrintSCUFrame.this, "Could not read file: " + file); return; } try { curPLutUid = printSCU.createPLUT(ds); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } setEnabled(false); actDeletePlut.setEnabled(true); } }; actCreatePlut.putValue(Action.NAME, "Create P-LUT"); //Delete FilmSession actDeleteFilmSession = new AbstractAction() { public void actionPerformed(ActionEvent e) { try { printSCU.deleteFilmSession(); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } setEnabled(false); actCreateFilmBox.setEnabled(false); actDeleteFilmBox.setEnabled(false); actCreateImageBox.setEnabled(false); actCreatePlut.setEnabled(false); actDeletePlut.setEnabled(false); actCreateAnnotation.setEnabled(false); actPrintFilmSession.setEnabled(false); actPrintFilmBox.setEnabled(false); actCreateFilmSession.setEnabled(true); } }; actDeleteFilmSession.putValue(Action.NAME, "Delete FilmSession"); //Delete FilmBox actDeleteFilmBox = new AbstractAction() { public void actionPerformed(ActionEvent e) { try { printSCU.deleteFilmBox(); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } setEnabled(false); actCreateFilmBox.setEnabled(true); actCreateImageBox.setEnabled(false); 
actPrintFilmBox.setEnabled(false); actPrintFilmSession.setEnabled(false); actCreatePlut.setEnabled(true); actDeletePlut.setEnabled(true); actCreateAnnotation.setEnabled(false); } }; actDeleteFilmBox.putValue(Action.NAME, "Delete FilmBox"); //Delete P-LUT actDeletePlut = new AbstractAction() { public void actionPerformed(ActionEvent e) { try { printSCU.deletePLUT(curPLutUid); curPLutUid = null; } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } setEnabled(false); actCreatePlut.setEnabled(true); } }; actDeletePlut.putValue(Action.NAME, "Delete P-LUT"); //Print FilmSession actPrintFilmSession = new AbstractAction() { public void actionPerformed(ActionEvent e) { try { printSCU.printFilmSession(); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } } }; actPrintFilmSession.putValue(Action.NAME, "Print FilmSession"); //Print FilmBox actPrintFilmBox = new AbstractAction() { public void actionPerformed(ActionEvent e) { try { printSCU.printFilmBox(); } catch (InterruptedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } catch (DcmServiceException e1) { // TODO Auto-generated catch block e1.printStackTrace(); return; } } }; actPrintFilmBox.putValue(Action.NAME, "Print FilmBox"); //disable all buttons onDisconnect(); //set up buttons for commands JPanel subBtnPanel = new JPanel(); subBtnPanel.setLayout(new GridLayout(3, 1)); btnPanel.add(subBtnPanel); subBtnPanel.add(new JLabel("Print Server")); 
JButton btnConnect = new JButton(actConnect); subBtnPanel.add(btnConnect); JButton btnRelease = new JButton(actRelease); subBtnPanel.add(btnRelease); subBtnPanel = new JPanel(); subBtnPanel.setLayout(new GridLayout(3, 1)); btnPanel.add(subBtnPanel); subBtnPanel.add(new JLabel("Film Session")); JButton btnCreateFilmSession = new JButton(actCreateFilmSession); subBtnPanel.add(btnCreateFilmSession); JButton btnDeleteFilmSession = new JButton(actDeleteFilmSession); subBtnPanel.add(btnDeleteFilmSession); subBtnPanel = new JPanel(); subBtnPanel.setLayout(new GridLayout(3, 1)); btnPanel.add(subBtnPanel); subBtnPanel.add(new JLabel("Presentation LUT")); JButton btnCreatePlut = new JButton(actCreatePlut); subBtnPanel.add(btnCreatePlut); JButton btnDeletePlut = new JButton(actDeletePlut); subBtnPanel.add(btnDeletePlut); subBtnPanel = new JPanel(); subBtnPanel.setLayout(new GridLayout(3, 1)); btnPanel.add(subBtnPanel); subBtnPanel.add(new JLabel("Film Box")); JButton btnCreateFilmBox = new JButton(actCreateFilmBox); subBtnPanel.add(btnCreateFilmBox); JButton btnDeleteFilmBox = new JButton(actDeleteFilmBox); subBtnPanel.add(btnDeleteFilmBox); subBtnPanel = new JPanel(); subBtnPanel.setLayout(new GridLayout(3, 1)); btnPanel.add(subBtnPanel); subBtnPanel.add(new JLabel("Image Box")); JButton btnCreateImageBox = new JButton(actCreateImageBox); subBtnPanel.add(btnCreateImageBox); JCheckBox chkUseSeparatePresState = new JCheckBox(new AbstractAction("Apply Presentation State") { public void actionPerformed(ActionEvent e) { applySeparatePresState = !applySeparatePresState; } }); chkUseSeparatePresState.setSelected(applySeparatePresState); chkUseSeparatePresState.setToolTipText("Enables you to choose a separate Presentation State object to apply to the chosen DICOM image"); subBtnPanel.add(chkUseSeparatePresState); subBtnPanel = new JPanel(); subBtnPanel.setLayout(new GridLayout(3, 1)); btnPanel.add(subBtnPanel); subBtnPanel.add(new JLabel("Annotation")); JButton btnCreateAnnotation = 
new JButton(actCreateAnnotation); subBtnPanel.add(btnCreateAnnotation); subBtnPanel = new JPanel(); subBtnPanel.setLayout(new GridLayout(3, 1)); btnPanel.add(subBtnPanel); subBtnPanel.add(new JLabel("Print")); JButton btnPrintFilmSession = new JButton(actPrintFilmSession); subBtnPanel.add(btnPrintFilmSession); JButton btnPrintFilmBox = new JButton(actPrintFilmBox); subBtnPanel.add(btnPrintFilmBox); //update from all properties propertyChanged(null); } //propertyName == null, means all need to be updated public void propertyChanged(String propertyName) { //Verbose if (propertyName == null || "Verbose".equals(propertyName)) { Integer verbose; if ((verbose = getIntegerFromProperty("Verbose")) != null) { switch (verbose.intValue()) { case 0: log.setLevel(Level.OFF); break; case 1: log.setLevel(Level.FATAL); break; case 2: log.setLevel(Level.ERROR); break; case 3: log.setLevel(Level.WARN); break; case 4: log.setLevel(Level.INFO); break; case 5: log.setLevel(Level.DEBUG); break; case 6: log.setLevel(Level.ALL); break; } } else log.setLevel(Level.WARN); } } PrintSCUFrame(String title) { this(); setTitle(title); } protected void dump(Dataset ds, String from) { StringWriter out = new StringWriter(); try { ds.dumpDataset(out, null); } catch (IOException ioe) { log.warn("Could not dump attributes for " + from); } log.info(out.toString()); } protected String getStringFromProperty(String propertyName) { return (String)getFromProperty(propertyName, String.class); } protected Integer getIntegerFromProperty(String propertyName) { return (Integer)getFromProperty(propertyName, Integer.class); } protected Boolean getBooleanFromProperty(String propertyName) { return (Boolean)getFromProperty(propertyName, Boolean.class); } /* * Passing an unknown Class (or missing property value) returns null to caller */ private final Object getFromProperty(String propertyName, Class argType) { String prop; Object ret = null; if ((prop = propPanel.getProperty(propertyName)) != null) { try { if 
(argType == String.class) ret = prop; else if (argType == Integer.class) ret = Integer.valueOf(prop); else if (argType == Boolean.class) ret = Boolean.valueOf("true".equalsIgnoreCase(prop) || "yes".equalsIgnoreCase(prop) || "1".equals(prop)); } catch (NumberFormatException e) { log.warn(propertyName + " is an invalid number"); } } if (ret != null) { log.debug("Setting property " + propertyName + " = " + ret); } return ret; } private void onDisconnect() { actRelease.setEnabled(false); actCreateFilmSession.setEnabled(false); actCreateFilmBox.setEnabled(false); actCreateImageBox.setEnabled(false); actCreatePlut.setEnabled(false); actCreateAnnotation.setEnabled(false); actPrintFilmSession.setEnabled(false); actPrintFilmBox.setEnabled(false); actDeleteFilmSession.setEnabled(false); actDeleteFilmBox.setEnabled(false); actDeletePlut.setEnabled(false); } /* for abnormal exit */ private static void exit(String msg) { System.out.println(msg); System.out.println(USAGE); System.exit(1); } private final static String USAGE = "Usage: java -jar printSCU.jar [OPTIONS]\n\n" + "Connects to a DICOM Print Service Class Provider.\n" + "Options:\n" + " -h --help show this help and exit\n"; public static void main(String[] args) { BasicConfigurator.configure(); LongOpt[] longopts = { new LongOpt("help", LongOpt.NO_ARGUMENT, null, 'h') }; Getopt g = new Getopt("printSCU", args, "t:a:h", longopts, true); try { int c; while ((c = g.getopt()) != -1) { switch (c) { case 'h': case '?': exit(""); break; } } int optind = g.getOptind(); int argc = args.length - optind; if (argc != 0) { exit("printSCU: wrong number of arguments\n"); } PrintSCUFrame printSCU = new PrintSCUFrame("Print SCU Client"); printSCU.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); printSCU.show(); } catch (IllegalArgumentException e) { exit("printSCU: illegal argument - " + e.getMessage() + "\n"); } } }
YUI.add('io-nodejs', function(Y) { /*global Y: false, Buffer: false, clearInterval: false, clearTimeout: false, console: false, exports: false, global: false, module: false, process: false, querystring: false, require: false, setInterval: false, setTimeout: false, __filename: false, __dirname: false */ /** * Passthru to the NodeJS <a href="https://github.com/mikeal/request">request</a> module. * This method is return of `require('request')` so you can use it inside NodeJS without * the IO abstraction. * @method request * @static */ if (!Y.IO.request) { Y.IO.request = require('request'); } var codes = require('http').STATUS_CODES; Y.log('Loading NodeJS Request Transport', 'info', 'io'); /** NodeJS IO transport, uses the NodeJS <a href="https://github.com/mikeal/request">request</a> module under the hood to perform all network IO. @method transports.nodejs @static @returns {Object} This object contains only a `send` method that accepts a `transaction object`, `uri` and the `config object`. @example Y.io('https://somedomain.com/url', { method: 'PUT', data: '?foo=bar', //Extra request module config options. 
request: { maxRedirects: 100, strictSSL: true, multipart: [ { 'content-type': 'application/json', body: JSON.stringify({ foo: 'bar', _attachments: { 'message.txt': { follows: true, length: 18, 'content_type': 'text/plain' } } }) }, { body: 'I am an attachment' } ] }, on: { success: function(id, e) { Y.log(e.responseText); } } }); */ var flatten = function(o) { var str = []; Object.keys(o).forEach(function(name) { str.push(name + ': ' + o[name]); }); return str.join('\n'); }; Y.IO.transports.nodejs = function() { return { send: function (transaction, uri, config) { Y.log('Starting Request Transaction', 'info', 'io'); config.notify('start', transaction, config); config.method = config.method || 'GET'; config.method = config.method.toUpperCase(); var rconf = { method: config.method, uri: uri }; if (config.data) { if (Y.Lang.isObject(config.data)) { if (Y.QueryString && Y.QueryString.stringify) { Y.log('Stringifying config.data for request', 'info', 'io'); rconf.body = Y.QueryString.stringify(config.data); } else { Y.log('Failed to stringify config.data object, likely because `querystring-stringify-simple` is missing.', 'warn', 'io'); } } else if (Y.Lang.isString(config.data)) { rconf.body = config.data; } if (rconf.method === 'GET') { rconf.uri += (rconf.uri.indexOf('?') > -1 ? '&' : '?') + rconf.body; rconf.body = ''; } } if (config.headers) { rconf.headers = config.headers; } if (config.timeout) { rconf.timeout = config.timeout; } if (config.request) { Y.mix(rconf, config.request); } Y.log('Initiating ' + rconf.method + ' request to: ' + rconf.uri, 'info', 'io'); Y.IO.request(rconf, function(err, data) { Y.log('Request Transaction Complete', 'info', 'io'); if (err) { Y.log('An IO error occurred', 'warn', 'io'); transaction.c = err; config.notify(((err.code === 'ETIMEDOUT') ? 
'timeout' : 'failure'), transaction, config); return; } if (data) { transaction.c = { status: data.statusCode, statusCode: data.statusCode, statusText: codes[data.statusCode], headers: data.headers, responseText: data.body, responseXML: null, getResponseHeader: function(name) { return this.headers[name]; }, getAllResponseHeaders: function() { return flatten(this.headers); } }; } Y.log('Request Transaction Complete', 'info', 'io'); config.notify('complete', transaction, config); config.notify(((data && (data.statusCode >= 200 && data.statusCode <= 299)) ? 'success' : 'failure'), transaction, config); }); var ret = { io: transaction }; return ret; } }; }; Y.IO.defaultTransport('nodejs'); }, '@VERSION@' ,{requires:['io-base']});
module Pakyow
  # Evaluates a route template expansion: replays a previously registered
  # template block against a concrete scope, letting the expansion supply
  # actions, groups, and namespaces that the template declared.
  class RouteExpansionEval < RouteEval
    attr_writer :direct_path

    # Builds the template eval for this scope, runs the stored template block,
    # then evaluates the expansion block itself (via `super`) and finally any
    # post-processing the template registered.
    def eval(&block)
      @template_eval = RouteTemplateEval.from_scope(self, path: path, group: @group, hooks: @hooks)
      @template_eval.direct_path = @direct_path
      @template_eval.eval(&@template_block)
      # Routes registered during expansion are rooted at the template's routes path.
      @path = @template_eval.routes_path

      super

      instance_exec(&@template_eval.post_process) if @template_eval.post_process
    end

    # Stores the template to expand later: `template` is a two-element array of
    # [hooks, block]. Hooks from the template are merged into this eval's hooks.
    def set_template(expansion_name, template)
      @expansion_name = expansion_name
      @template_block = template[1]
      @hooks = merge_hooks(@hooks, template[0])
    end

    # Registers a route for a template-declared action. An explicit block takes
    # precedence over a fn passed in `args`; the fn is prepended to the fns the
    # template already attached to the action's route.
    def action(method, *args, &block)
      fn, hooks = self.class.parse_action_args(args)
      fn = block if block_given?

      # get route info from template
      route = @template_eval.route_for_action(method)

      # NOTE(review): route[3] holds { fns:, hooks: } and is mutated in place
      # before being replaced with the built fn chain.
      all_fns = route[3]
      all_fns[:fns].unshift(fn) if fn

      hooks = merge_hooks(hooks, all_fns[:hooks])
      route[3] = build_fns(all_fns[:fns], hooks)

      register_route(route)
    end

    # Expands a group the template declared, merging caller hooks with the
    # group's own hooks (group data layout: [hooks, ...]).
    def action_group(*args, &block)
      name, hooks = self.class.parse_action_group_args(args)
      group = @template_eval.group_named(name)

      hooks = merge_hooks(hooks, group[0])
      group(@expansion_name, hooks, &block)
    end

    # Expands a namespace the template declared (namespace data layout:
    # [path, hooks]).
    def action_namespace(*args, &block)
      name, hooks = self.class.parse_action_namespace_args(args)
      namespace = @template_eval.namespace_named(name)

      hooks = merge_hooks(hooks, namespace[1])
      namespace(@expansion_name, namespace[0], hooks, &block)
    end

    # Dispatches unknown calls inside the expansion block to the matching
    # template part (action, namespace, or group); raises UnknownTemplatePart
    # when nothing in the template matches.
    def method_missing(method, *args, &block)
      if @template_eval.has_action?(method)
        action(method, *args, &block)
      elsif @template_eval.has_namespace?(method)
        action_namespace(method, *args, &block)
      elsif @template_eval.has_group?(method)
        action_group(method, *args, &block)
      else
        super
      end
    rescue NoMethodError
      raise UnknownTemplatePart, "No action, namespace, or group named '#{method}'"
    end

    # Nested expansions are re-rooted under the template's nested path
    # (args[2] is the path argument of the inner expansion).
    def expand(*args, &block)
      args[2] = File.join(@template_eval.nested_path.gsub(@path, ''), args[2])
      super(*args, &block)
    end

    private

    # Argument parsers return a two-element array: [primary, hooks].
    # (`private` above does not apply to these singleton methods; they are
    # effectively public class-level helpers.)
    class << self
      # Extracts [fn, hooks] from a mixed argument list.
      def parse_action_args(args)
        ret = []
        args.each { |arg|
          if arg.is_a?(Hash) # we have hooks
            ret[1] = arg
          elsif arg.is_a?(Proc) # we have a fn
            ret[0] = arg
          end
        }
        ret
      end

      # Extracts [name, hooks]; the name must be a Symbol.
      def parse_action_namespace_args(args)
        ret = []
        args.each { |arg|
          if arg.is_a?(Hash) # we have hooks
            ret[1] = arg
          elsif arg.is_a?(Symbol) # we have a name
            ret[0] = arg
          end
        }
        ret
      end

      # Extracts [name, hooks]; any non-nil, non-Hash value is taken as the name.
      def parse_action_group_args(args)
        ret = []
        args.each { |arg|
          if arg.is_a?(Hash) # we have hooks
            ret[1] = arg
          elsif !arg.nil? # we have a name
            ret[0] = arg
          end
        }
        ret
      end
    end
  end
end
Partitions `data` into `num_partitions` tensors using indices from `partitions`. For each index tuple `js` of size `partitions.ndim`, the slice `data[js, ...]` becomes part of `outputs[partitions[js]]`. The slices with `partitions[js] = i` are placed in `outputs[i]` in lexicographic order of `js`, and the first dimension of `outputs[i]` is the number of entries in `partitions` equal to `i`. In detail, ```python outputs[i].shape = [sum(partitions == i)] + data.shape[partitions.ndim:] outputs[i] = pack([data[js, ...] for js if partitions[js] == i]) ``` `data.shape` must start with `partitions.shape`. For example: ```python # Scalar partitions. partitions = 1 num_partitions = 2 data = [10, 20] outputs[0] = [] # Empty with shape [0, 2] outputs[1] = [[10, 20]] # Vector partitions. partitions = [0, 0, 1, 1, 0] num_partitions = 2 data = [10, 20, 30, 40, 50] outputs[0] = [10, 20, 50] outputs[1] = [30, 40] ``` <div style="width:70%; margin:auto; margin-bottom:10px; margin-top:20px;"> <img style="width:100%" src="../../images/DynamicPartition.png" alt> </div> ##### Args: * <b>`data`</b>: A `Tensor`. * <b>`partitions`</b>: A `Tensor` of type `int32`. Any shape. Indices in the range `[0, num_partitions)`. * <b>`num_partitions`</b>: An `int` that is `>= 1`. The number of partitions to output. * <b>`name`</b>: A name for the operation (optional). ##### Returns: A list of `num_partitions` `Tensor` objects of the same type as data.
<!doctype html>
<title>CSS Container Queries Test: Absolute positioned canvas container crash</title>
<link rel="help" href="https://drafts.csswg.org/css-contain-3/#size-container">
<link rel="help" href="https://crbug.com/1289850">
<p>Pass if there is no crash.</p>
<canvas id="canv" style="display:block;position:absolute;container-type:inline-size"></canvas>
<script>
// Crash regression test (crbug.com/1289850): force layout of the absolutely
// positioned canvas that is also an inline-size container, then mutate its
// subtree. The test passes simply by not crashing the browser.
canv.offsetTop;
canv.appendChild(document.createElement("span"));
</script>
// Make the `$localize()` global function available to the compiled templates, and the direct calls
// below. This would normally be done inside the application `polyfills.ts` file.
import '@angular/localize/init';

import {computeMsgId} from '@angular/compiler';
import {loadTranslations} from '@angular/localize';

// French translation table for the example app. Keys are the message ids the
// Angular compiler derives from each source message; `computeMsgId` reproduces
// that derivation at runtime so the keys stay in sync with the templates.
// Placeholders like {$TAG_INPUT} / {$INTERPOLATION} and ICU expressions are
// preserved verbatim so the runtime can re-substitute them.
export const translations = {
  [computeMsgId('What needs to be done?', '')]: `Qu'y a-t-il à faire ?`,
  [computeMsgId('{$START_HEADING_LEVEL1}todos{$CLOSE_HEADING_LEVEL1}{$TAG_INPUT}', '')]:
      '{$START_HEADING_LEVEL1}liste de tâches{$CLOSE_HEADING_LEVEL1}{$TAG_INPUT}',
  [computeMsgId('{VAR_PLURAL, plural, =1 {item left} other {items left}}', '')]:
      '{VAR_PLURAL, plural, =1 {tâche restante} other {tâches restantes}}',
  [computeMsgId('{$START_TAG_STRONG}{$INTERPOLATION}{$CLOSE_TAG_STRONG}{$ICU}', '')]:
      '{$START_TAG_STRONG}{$INTERPOLATION}{$CLOSE_TAG_STRONG} {$ICU}',
  [computeMsgId('Clear Completed', '')]: ' Effacer terminés ',
  [computeMsgId('Demonstrate Components', '')]: ' Démontrer les components',
  [computeMsgId('Demonstrate Structural Directives', '')]: 'Démontrer les directives structurelles',
  [computeMsgId('Demonstrate {$value}', '')]: 'Démontrer {$value}',
  [computeMsgId('Demonstrate zoneless change detection', '')]:
      'Démontrer la détection des changements sans zonejs',
  [computeMsgId('Demonstrate internationalization', '')]: `Démontrer l'internationalisation`
};

// Install the translation table; must run before any `$localize` call is evaluated.
loadTranslations(translations);
/**
 * Smoke test for WTSShutdownSystem().
 *
 * Requests a system shutdown (WTS_WSD_SHUTDOWN) of the current server via
 * the WTS API and checks the call's success flag.
 *
 * @return 0 on success (or when the test is skipped), -1 if
 *         WTSShutdownSystem() reports failure.
 */
int TestWtsApiShutdownSystem(int argc, char* argv[])
{
	BOOL bSuccess;
	HANDLE hServer;
	DWORD ShutdownFlag;

#ifndef _WIN32
	/* On non-Windows builds an RDS backend is only available when the
	 * WTSAPI_LIBRARY environment variable points at one; without it, skip
	 * the test instead of failing. */
	if (!GetEnvironmentVariableA("WTSAPI_LIBRARY", NULL, 0))
	{
		printf("%s: No RDS environment detected, skipping test\n", __FUNCTION__);
		return 0;
	}
#endif

	/* Use the pseudo-handle for the server this code is running on. */
	hServer = WTS_CURRENT_SERVER_HANDLE;
	ShutdownFlag = WTS_WSD_SHUTDOWN;

	bSuccess = WTSShutdownSystem(hServer, ShutdownFlag);

	if (!bSuccess)
	{
		printf("WTSShutdownSystem failed: %"PRIu32"\n", GetLastError());
		return -1;
	}

	return 0;
}
<?php
namespace SebastianBergmann\Diff;

use PHPUnit\Framework\TestCase;

/**
 * Unit tests covering the default state of the Line value object.
 *
 * @covers SebastianBergmann\Diff\Line
 */
class LineTest extends TestCase
{
    /**
     * @var Line
     */
    private $line;

    /**
     * Creates a fresh Line with no constructor arguments before each test,
     * so every test observes the class defaults.
     */
    protected function setUp()
    {
        $this->line = new Line;
    }

    /**
     * The zero-argument constructor must yield a valid Line instance.
     */
    public function testCanBeCreatedWithoutArguments()
    {
        $this->assertInstanceOf('SebastianBergmann\Diff\Line', $this->line);
    }

    /**
     * A default-constructed Line reports type Line::UNCHANGED.
     */
    public function testTypeCanBeRetrieved()
    {
        $this->assertEquals(Line::UNCHANGED, $this->line->getType());
    }

    /**
     * A default-constructed Line has empty string content.
     */
    public function testContentCanBeRetrieved()
    {
        $this->assertEquals('', $this->line->getContent());
    }
}
<title>添加推荐审核员</title> <link href="/assets/lib/icheck/icheck.css" rel="stylesheet" type="text/css" /> </head> <body> <div class="pd-20"> <form class="form form-horizontal" id="form-member-add"> <input id="type" value="1" type="hidden"> <div class="row cl"> <label class="form-label col-3"><span class="c-red">*</span>审核员用户名:</label> <div class="formControls col-5"> <!-- <span id="supermarketname"></span> --> <input type="text" class="input-text" value="" placeholder="" id="username" name="username" datatype="*2-16" nullmsg="审核员用户名不能为空"> </div> <div class="col-4"> </div> </div> <div class="row cl"> <label class="form-label col-3"><span class="c-red">*</span>密码:</label> <div class="formControls col-5"> <!-- <span id="supermarketname"></span> --> <input type="text" class="input-text" value="" placeholder="" id="password" name="password" datatype="*2-16" nullmsg="密码不能为空"> </div> <div class="col-4"> </div> </div> <div class="row cl"> <div class="col-9 col-offset-3"> <input class="btn btn-primary radius" type="submit" value="&nbsp;&nbsp;添加&nbsp;&nbsp;"> </div> </div> </form> </div> </div> <script type="text/javascript" src="/assets/lib/icheck/jquery.icheck.min.js"></script> <script charset="utf-8" src="/assets/js/jquery.form.js"></script> <script type="text/javascript"> $(function(){ $('.skin-minimal input').iCheck({ checkboxClass: 'icheckbox-blue', radioClass: 'iradio-blue', increaseArea: '20%' }); $("#form-member-add").Validform({ tiptype:2, callback:function(form){ //alert('ok'); // form[0].submit(); saveAdAdmin(true,function(){ alert('添加成功!'); var index = parent.layer.getFrameIndex(window.name); // parent.$('.btn-refresh').click(); parent.window.location.reload(); parent.layer.close(index); }); } }); }); </script> </body> </html>
<!-- Copyright (c) Microsoft Corporation. All rights reserved --> <!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <title></title> <link rel="stylesheet" href="/css/scenario1.css"> <script src="/js/scenario1.js"></script> </head> <body class="win-type-body"> <div id="scenarioView"> <div id="scenarioHeader"> <h2 id="sampleHeader" class="win-type-subheader">Description:</h2> <div id="scenarioDescription"> Demonstrates use of Ink and Reco APIs. </div> </div> <div id="scenarioContent"> <!-- These 3 canvases are displayed directly on top of each other. The exact height will be computed during inkInitialize(). --> <div id="canvasGroup" aria-label="Ink canvas" role="img"> <canvas id="HighlightCanvas" class="surface"></canvas> <canvas id="InkCanvas" class="surface"></canvas> <canvas id="SelectCanvas" class="surface"></canvas> <div id="SelectionBox" class="rectangle"></div> </div> <!-- The Word item is a dummy, invisible <div> that we position on the selected word. We use it as a marker for the position of the ink in the selected word, since the RecoFlyout must be positioned relative to an HTML element. --> <div id="Word" aria-label="Word" aria-live="polite" role="region"></div> <!-- This toolbar is displayed across the bottom of the screen. 
The color buttons have IDs which are the names of colors; the ID of each one is fed directly into the strokeStyle of the corresponding canvas.--> <div id="ToolBar" data-win-control="WinJS.UI.ToolBar"> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;Reco&apos;,label:&apos;Recognition&apos;,icon:&apos;characters&apos;,section:&apos;primary&apos;,onclick:Ink.recognize,tooltip:&apos;Recognize handwriting&apos;,priority:1}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;Find&apos;,label:&apos;Find&apos;, icon:&apos;find&apos;, section:&apos;primary&apos;,type:&apos;flyout&apos;,flyout:&apos;FindFlyout&apos;,tooltip:&apos;Find handwritten text&apos;,priority:1}" class="win-button"></button> <hr data-win-control="WinJS.UI.Command" data-win-options="{type:&apos;separator&apos;}"> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;ModeErase&apos;, label:&apos;Erase&apos;, icon:&apos;clear&apos;, onclick:Ink.eraseMode, section:&apos;primary&apos;,tooltip:&apos;Switch pen tip to eraser mode&apos;,priority:2}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;ModeSelect&apos;,label:&apos;Select&apos;,icon:&apos;selectall&apos;,onclick:Ink.selectMode,section:&apos;primary&apos;,tooltip:&apos;Switch pen tip to lasso mode&apos;,priority:2}" class="win-button"></button> <hr data-win-control="WinJS.UI.Command" data-win-options="{type:&apos;separator&apos;}"> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;InkColors&apos;,label:&apos;Color&apos;,icon:&apos;fontcolor&apos;,section:&apos;primary&apos;,type:&apos;flyout&apos;,flyout:&apos;InkColorFlyout&apos;,tooltip:&apos;Choose ink color&apos;,priority:2}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;InkWidth&apos;, label:&apos;Width&apos;,icon:&apos;edit&apos;, 
section:&apos;primary&apos;,type:&apos;flyout&apos;,flyout:&apos;InkWidthFlyout&apos;,tooltip:&apos;Choose ink width&apos;,priority:2}" class="win-button"></button> <hr data-win-control="WinJS.UI.Command" data-win-options="{type:&apos;separator&apos;}"> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;HighlightColors&apos;,label:&apos;Highlight Color&apos;,icon:&apos;fontcolor&apos;,section:&apos;primary&apos;,type:&apos;flyout&apos;,flyout:&apos;HighlightColorFlyout&apos;,tooltip:&apos;Choose highlighting color&apos;,priority:3}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;HighlightWidth&apos;, label:&apos;Highlight Width&apos;,icon:&apos;edit&apos;, section:&apos;primary&apos;,type:&apos;flyout&apos;,flyout:&apos;HighlightWidthFlyout&apos;,tooltip:&apos;Choose highlighting width&apos;,priority:3}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;CopySelected&apos;,label:&apos;Copy&apos;,onclick:Ink.copySelected,section:&apos;secondary&apos;}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;Paste&apos;,label:&apos;Paste&apos;,onclick:Ink.paste,section:&apos;secondary&apos;}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;Load&apos;,label:&apos;Load&apos;,onclick:Ink.load,section:&apos;secondary&apos;}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;Save&apos;,label:&apos;Save&apos;,onclick:Ink.save,section:&apos;secondary&apos;}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" data-win-options="{id:&apos;Clear&apos;,label:&apos;Clear&apos;,onclick:Ink.clear,section:&apos;secondary&apos;}" class="win-button"></button> <button data-win-control="WinJS.UI.Command" 
data-win-options="{id:&apos;Refresh&apos;,label:&apos;Refresh&apos;,onclick:Ink.refresh,section:&apos;secondary&apos;}" class="win-button"></button> </div> </div> </div> </body> </html>
YUI.add('model-sync-rest', function (Y, NAME) { /** An extension which provides a RESTful XHR sync implementation that can be mixed into a Model or ModelList subclass. @module app @submodule model-sync-rest @since 3.6.0 **/ var Lang = Y.Lang; /** An extension which provides a RESTful XHR sync implementation that can be mixed into a Model or ModelList subclass. This makes it trivial for your Model or ModelList subclasses communicate and transmit their data via RESTful XHRs. In most cases you'll only need to provide a value for `root` when sub-classing `Y.Model`. Y.User = Y.Base.create('user', Y.Model, [Y.ModelSync.REST], { root: '/users' }); Y.Users = Y.Base.create('users', Y.ModelList, [Y.ModelSync.REST], { // By convention `Y.User`'s `root` will be used for the lists' URL. model: Y.User }); var users = new Y.Users(); // GET users list from: "/users" users.load(function () { var firstUser = users.item(0); firstUser.get('id'); // => "1" // PUT updated user data at: "/users/1" firstUser.set('name', 'Eric').save(); }); @class ModelSync.REST @extensionfor Model @extensionfor ModelList @since 3.6.0 **/ function RESTSync() {} /** A request authenticity token to validate HTTP requests made by this extension with the server when the request results in changing persistent state. This allows you to protect your server from Cross-Site Request Forgery attacks. A CSRF token provided by the server can be embedded in the HTML document and assigned to `YUI.Env.CSRF_TOKEN` like this: <script> YUI.Env.CSRF_TOKEN = {{session.authenticityToken}}; </script> The above should come after YUI seed file so that `YUI.Env` will be defined. **Note:** This can be overridden on a per-request basis. See `sync()` method. When a value for the CSRF token is provided, either statically or via `options` passed to the `save()` and `destroy()` methods, the applicable HTTP requests will have a `X-CSRF-Token` header added with the token value. 
@property CSRF_TOKEN @type String @default YUI.Env.CSRF_TOKEN @static @since 3.6.0 **/ RESTSync.CSRF_TOKEN = YUI.Env.CSRF_TOKEN; /** Static flag to use the HTTP POST method instead of PUT or DELETE. If the server-side HTTP framework isn't RESTful, setting this flag to `true` will cause all PUT and DELETE requests to instead use the POST HTTP method, and add a `X-HTTP-Method-Override` HTTP header with the value of the method type which was overridden. @property EMULATE_HTTP @type Boolean @default false @static @since 3.6.0 **/ RESTSync.EMULATE_HTTP = false; /** Default headers used with all XHRs. By default the `Accept` and `Content-Type` headers are set to "application/json", this signals to the HTTP server to process the request bodies as JSON and send JSON responses. If you're sending and receiving content other than JSON, you can override these headers and the `parse()` and `serialize()` methods. **Note:** These headers will be merged with any request-specific headers, and the request-specific headers will take precedence. @property HTTP_HEADERS @type Object @default { "Accept" : "application/json", "Content-Type": "application/json" } @static @since 3.6.0 **/ RESTSync.HTTP_HEADERS = { 'Accept' : 'application/json', 'Content-Type': 'application/json' }; /** Static mapping of RESTful HTTP methods corresponding to CRUD actions. @property HTTP_METHODS @type Object @default { "create": "POST", "read" : "GET", "update": "PUT", "delete": "DELETE" } @static @since 3.6.0 **/ RESTSync.HTTP_METHODS = { 'create': 'POST', 'read' : 'GET', 'update': 'PUT', 'delete': 'DELETE' }; /** The number of milliseconds before the XHRs will timeout/abort. This defaults to 30 seconds. **Note:** This can be overridden on a per-request basis. See `sync()` method. @property HTTP_TIMEOUT @type Number @default 30000 @static @since 3.6.0 **/ RESTSync.HTTP_TIMEOUT = 30000; /** Properties that shouldn't be turned into ad-hoc attributes when passed to a Model or ModelList constructor. 
@property _NON_ATTRS_CFG @type Array @default ["root", "url"] @static @protected @since 3.6.0 **/ RESTSync._NON_ATTRS_CFG = ['root', 'url']; RESTSync.prototype = { // -- Public Properties ---------------------------------------------------- /** A string which represents the root or collection part of the URL which relates to a Model or ModelList. Usually this value should be same for all instances of a specific Model/ModelList subclass. When sub-classing `Y.Model`, usually you'll only need to override this property, which lets the URLs for the XHRs be generated by convention. If the `root` string ends with a trailing-slash, XHR URLs will also end with a "/", and if the `root` does not end with a slash, neither will the XHR URLs. @example Y.User = Y.Base.create('user', Y.Model, [Y.ModelSync.REST], { root: '/users' }); var currentUser, newUser; // GET the user data from: "/users/123" currentUser = new Y.User({id: '123'}).load(); // POST the new user data to: "/users" newUser = new Y.User({name: 'Eric Ferraiuolo'}).save(); When sub-classing `Y.ModelList`, usually you'll want to ignore configuring the `root` and simply rely on the build-in convention of the list's generated URLs defaulting to the `root` specified by the list's `model`. @property root @type String @default "" @since 3.6.0 **/ root: '', /** A string which specifies the URL to use when making XHRs, if not value is provided, the URLs used to make XHRs will be generated by convention. While a `url` can be provided for each Model/ModelList instance, usually you'll want to either rely on the default convention or provide a tokenized string on the prototype which can be used for all instances. When sub-classing `Y.Model`, you will probably be able to rely on the default convention of generating URLs in conjunction with the `root` property and whether the model is new or not (i.e. has an `id`). 
If the `root` property ends with a trailing-slash, the generated URL for the specific model will also end with a trailing-slash. @example Y.User = Y.Base.create('user', Y.Model, [Y.ModelSync.REST], { root: '/users/' }); var currentUser, newUser; // GET the user data from: "/users/123/" currentUser = new Y.User({id: '123'}).load(); // POST the new user data to: "/users/" newUser = new Y.User({name: 'Eric Ferraiuolo'}).save(); If a `url` is specified, it will be processed by `Y.Lang.sub()`, which is useful when the URLs for a Model/ModelList subclass match a specific pattern and can use simple replacement tokens; e.g.: @example Y.User = Y.Base.create('user', Y.Model, [Y.ModelSync.REST], { root: '/users', url : '/users/{username}' }); **Note:** String subsitituion of the `url` only use string an number values provided by this object's attribute and/or the `options` passed to the `getURL()` method. Do not expect something fancy to happen with Object, Array, or Boolean values, they will simply be ignored. If your URLs have plural roots or collection URLs, while the specific item resources are under a singular name, e.g. "/users" (plural) and "/user/123" (singular), you'll probably want to configure the `root` and `url` properties like this: @example Y.User = Y.Base.create('user', Y.Model, [Y.ModelSync.REST], { root: '/users', url : '/user/{id}' }); var currentUser, newUser; // GET the user data from: "/user/123" currentUser = new Y.User({id: '123'}).load(); // POST the new user data to: "/users" newUser = new Y.User({name: 'Eric Ferraiuolo'}).save(); When sub-classing `Y.ModelList`, usually you'll be able to rely on the associated `model` to supply its `root` to be used as the model list's URL. If this needs to be customized, you can provide a simple string for the `url` property. @example Y.Users = Y.Base.create('users', Y.ModelList, [Y.ModelSync.REST], { // Leverages `Y.User`'s `root`, which is "/users". model: Y.User }); // Or specified explicitly... 
Y.Users = Y.Base.create('users', Y.ModelList, [Y.ModelSync.REST], { model: Y.User, url : '/users' }); @property url @type String @default "" @since 3.6.0 **/ url: '', // -- Lifecycle Methods ---------------------------------------------------- initializer: function (config) { config || (config = {}); // Overrides `root` at the instance level. if ('root' in config) { this.root = config.root || ''; } // Overrides `url` at the instance level. if ('url' in config) { this.url = config.url || ''; } }, // -- Public Methods ------------------------------------------------------- /** Returns the URL for this model or model list for the given `action` and `options`, if specified. This method correctly handles the variations of `root` and `url` values and is called by the `sync()` method to get the URLs used to make the XHRs. You can override this method if you need to provide a specific implementation for how the URLs of your Model and ModelList subclasses need to be generated. @method getURL @param {String} [action] Optional `sync()` action for which to generate the URL. @param {Object} [options] Optional options which may be used to help generate the URL. @return {String} this model's or model list's URL for the the given `action` and `options`. @since 3.6.0 **/ getURL: function (action, options) { var root = this.root, url = this.url; // If this is a model list, use its `url` and substitute placeholders, // but default to the `root` of its `model`. By convention a model's // `root` is the location to a collection resource. if (this._isYUIModelList) { if (!url) { return this.model.prototype.root; } return this._substituteURL(url, Y.merge(this.getAttrs(), options)); } // Assume `this` is a model. // When a model is new, i.e. has no `id`, the `root` should be used. By // convention a model's `root` is the location to a collection resource. // The model's `url` will be used as a fallback if `root` isn't defined. 
if (root && (action === 'create' || this.isNew())) { return root; } // When a model's `url` is not provided, we'll generate a URL to use by // convention. This will combine the model's `id` with its configured // `root` and add a trailing-slash if the root ends with "/". if (!url) { return this._joinURL(this.getAsURL('id') || ''); } // Substitute placeholders in the `url` with URL-encoded values from the // model's attribute values or the specified `options`. return this._substituteURL(url, Y.merge(this.getAttrs(), options)); }, /** Called to parse the response object returned from `Y.io()`. This method receives the full response object and is expected to "prep" a response which is suitable to pass to the `parse()` method. By default the response body is returned (`responseText`), because it usually represents the entire entity of this model on the server. If you need to parse data out of the response's headers you should do so by overriding this method. If you'd like the entire response object from the XHR to be passed to your `parse()` method, you can simply assign this property to `false`. @method parseIOResponse @param {Object} response Response object from `Y.io()`. @return {Any} The modified response to pass along to the `parse()` method. @since 3.7.0 **/ parseIOResponse: function (response) { return response.responseText; }, /** Serializes `this` model to be used as the HTTP request entity body. By default this model will be serialized to a JSON string via its `toJSON()` method. You can override this method when the HTTP server expects a different representation of this model's data that is different from the default JSON serialization. If you're sending and receive content other than JSON, be sure change the `Accept` and `Content-Type` `HTTP_HEADERS` as well. **Note:** A model's `toJSON()` method can also be overridden. If you only need to modify which attributes are serialized to JSON, that's a better place to start. 
@method serialize @param {String} [action] Optional `sync()` action for which to generate the the serialized representation of this model. @return {String} serialized HTTP request entity body. @since 3.6.0 **/ serialize: function (action) { return Y.JSON.stringify(this); }, /** Communicates with a RESTful HTTP server by sending and receiving data via XHRs. This method is called internally by load(), save(), and destroy(). The URL used for each XHR will be retrieved by calling the `getURL()` method and passing it the specified `action` and `options`. This method relies heavily on standard RESTful HTTP conventions @method sync @param {String} action Sync action to perform. May be one of the following: * `create`: Store a newly-created model for the first time. * `delete`: Delete an existing model. * `read` : Load an existing model. * `update`: Update an existing model. @param {Object} [options] Sync options: @param {String} [options.csrfToken] The authenticity token used by the server to verify the validity of this request and protected against CSRF attacks. This overrides the default value provided by the static `CSRF_TOKEN` property. @param {Object} [options.headers] The HTTP headers to mix with the default headers specified by the static `HTTP_HEADERS` property. @param {Number} [options.timeout] The number of milliseconds before the request will timeout and be aborted. This overrides the default provided by the static `HTTP_TIMEOUT` property. @param {Function} [callback] Called when the sync operation finishes. @param {Error|null} callback.err If an error occurred, this parameter will contain the error. If the sync operation succeeded, _err_ will be falsy. @param {Any} [callback.response] The server's response. 
**/ sync: function (action, options, callback) { options || (options = {}); var url = this.getURL(action, options), method = RESTSync.HTTP_METHODS[action], headers = Y.merge(RESTSync.HTTP_HEADERS, options.headers), timeout = options.timeout || RESTSync.HTTP_TIMEOUT, csrfToken = options.csrfToken || RESTSync.CSRF_TOKEN, entity; // Prepare the content if we are sending data to the server. if (method === 'POST' || method === 'PUT') { entity = this.serialize(action); } else { // Remove header, no content is being sent. delete headers['Content-Type']; } // Setup HTTP emulation for older servers if we need it. if (RESTSync.EMULATE_HTTP && (method === 'PUT' || method === 'DELETE')) { // Pass along original method type in the headers. headers['X-HTTP-Method-Override'] = method; // Fall-back to using POST method type. method = 'POST'; } // Add CSRF token to HTTP request headers if one is specified and the // request will cause side effects on the server. if (csrfToken && (method === 'POST' || method === 'PUT' || method === 'DELETE')) { headers['X-CSRF-Token'] = csrfToken; } this._sendSyncIORequest({ action : action, callback: callback, entity : entity, headers : headers, method : method, timeout : timeout, url : url }); }, // -- Protected Methods ---------------------------------------------------- /** Joins the `root` URL to the specified `url`, normalizing leading/trailing "/" characters. @example model.root = '/foo' model._joinURL('bar'); // => '/foo/bar' model._joinURL('/bar'); // => '/foo/bar' model.root = '/foo/' model._joinURL('bar'); // => '/foo/bar/' model._joinURL('/bar'); // => '/foo/bar/' @method _joinURL @param {String} url URL to append to the `root` URL. @return {String} Joined URL. @protected @since 3.6.0 **/ _joinURL: function (url) { var root = this.root; if (!(root || url)) { return ''; } if (url.charAt(0) === '/') { url = url.substring(1); } // Combines the `root` with the `url` and adds a trailing-slash if the // `root` has a trailing-slash. 
return root && root.charAt(root.length - 1) === '/' ? root + url + '/' : root + '/' + url; }, /** Calls both public, overrideable methods: `parseIOResponse()`, then `parse()` and returns the result. This will call into `parseIOResponse()`, if it's defined as a method, passing it the full response object from the XHR and using its return value to pass along to the `parse()`. This enables developers to easily parse data out of the response headers which should be used by the `parse()` method. @method _parse @param {Object} response Response object from `Y.io()`. @return {Object|Object[]} Attribute hash or Array of model attribute hashes. @protected @since 3.7.0 **/ _parse: function (response) { // When `parseIOResponse` is defined as a method, it will be invoked and // the result will become the new response object that the `parse()` // will be invoked with. if (typeof this.parseIOResponse === 'function') { response = this.parseIOResponse(response); } return this.parse(response); }, /** Performs the XHR and returns the resulting `Y.io()` request object. This method is called by `sync()`. @method _sendSyncIORequest @param {Object} config An object with the following properties: @param {String} config.action The `sync()` action being performed. @param {Function} [config.callback] Called when the sync operation finishes. @param {String} [config.entity] The HTTP request entity body. @param {Object} config.headers The HTTP request headers. @param {String} config.method The HTTP request method. @param {Number} [config.timeout] Time until the HTTP request is aborted. @param {String} config.url The URL of the HTTP resource. @return {Object} The resulting `Y.io()` request object. 
    @protected
    @since 3.6.0
    **/
    _sendSyncIORequest: function (config) {
        return Y.io(config.url, {
            'arguments': {
                action  : config.action,
                callback: config.callback,
                url     : config.url
            },

            context: this,
            data   : config.entity,
            headers: config.headers,
            method : config.method,
            timeout: config.timeout,

            on: {
                start  : this._onSyncIOStart,
                failure: this._onSyncIOFailure,
                success: this._onSyncIOSuccess,
                end    : this._onSyncIOEnd
            }
        });
    },

    /**
    Utility which takes a tokenized `url` string and substitutes its
    placeholders using a specified `data` object. This method will properly
    URL-encode any values before substituting them. Also, only expect it to
    work with String and Number values.

    @example
        var url = this._substituteURL('/users/{name}', {name: 'Eric F'});
        // => "/users/Eric%20F"

    @method _substituteURL
    @param {String} url Tokenized URL string to substitute placeholder values.
    @param {Object} data Set of data to fill in the `url`'s placeholders.
    @return {String} Substituted URL.
    @protected
    @since 3.6.0
    **/
    _substituteURL: function (url, data) {
        if (!url) {
            return '';
        }

        var values = {};

        // Creates a hash of the string and number values only to be used to
        // replace any placeholders in a tokenized `url`.
        Y.Object.each(data, function (v, k) {
            if (Lang.isString(v) || Lang.isNumber(v)) {
                // URL-encode any string or number values.
                values[k] = encodeURIComponent(v);
            }
        });

        return Lang.sub(url, values);
    },

    // -- Event Handlers -------------------------------------------------------

    /**
    Called when the `Y.io` request has finished, after "success" or "failure"
    has been determined. This is a no-op by default, but provides a hook for
    overriding.

    @method _onSyncIOEnd
    @param {String} txId The `Y.io` transaction id.
    @param {Object} details Extra details carried through from `sync()`:
        @param {String} details.action The sync action performed.
        @param {Function} [details.callback] The function to call after syncing.
        @param {String} details.url The URL of the requested resource.
@protected @since 3.6.0 **/ _onSyncIOEnd: function (txId, details) {}, /** Called when the `Y.io` request has finished unsuccessfully. By default this calls the `details.callback` function passing it the HTTP status code and message as an error object along with the response body. @method _onSyncIOFailure @param {String} txId The `Y.io` transaction id. @param {Object} res The `Y.io` response object. @param {Object} details Extra details carried through from `sync()`: @param {String} details.action The sync action performed. @param {Function} [details.callback] The function to call after syncing. @param {String} details.url The URL of the requested resource. @protected @since 3.6.0 **/ _onSyncIOFailure: function (txId, res, details) { var callback = details.callback; if (callback) { callback({ code: res.status, msg : res.statusText }, res); } }, /** Called when the `Y.io` request has finished successfully. By default this calls the `details.callback` function passing it the response body. @method _onSyncIOSuccess @param {String} txId The `Y.io` transaction id. @param {Object} res The `Y.io` response object. @param {Object} details Extra details carried through from `sync()`: @param {String} details.action The sync action performed. @param {Function} [details.callback] The function to call after syncing. @param {String} details.url The URL of the requested resource. @protected @since 3.6.0 **/ _onSyncIOSuccess: function (txId, res, details) { var callback = details.callback; if (callback) { callback(null, res); } }, /** Called when the `Y.io` request is made. This is a no-op by default, but provides a hook for overriding. @method _onSyncIOStart @param {String} txId The `Y.io` transaction id. @param {Object} details Extra details carried through from `sync()`: @param {String} details.action The sync action performed. @param {Function} [details.callback] The function to call after syncing. @param {String} details.url The URL of the requested resource. 
@protected @since 3.6.0 **/ _onSyncIOStart: function (txId, details) {} }; // -- Namespace ---------------------------------------------------------------- Y.namespace('ModelSync').REST = RESTSync; }, '3.17.2', {"requires": ["model", "io-base", "json-stringify"]});
function CBIG_LiGSR_KRR_workflowGSP( data_csv, subject_list, RSFC_file, y_list, ...
    covariate_list, FD_file, DVARS_file, outdir, outstem, num_test_folds, num_inner_folds, ...
    seed, with_bias, ker_param_file, lambda_set_file, threshold_set_file )

% CBIG_LiGSR_KRR_workflowGSP( data_csv, subject_list, RSFC_file, y_list, ...
%     covariate_list, FD_file, DVARS_file, outdir, outstem, num_test_folds, ...
%     num_inner_folds, seed, with_bias, ker_param_file, lambda_set_file, threshold_set_file )
%
% This function performs the whole kernel ridge regression procedure for
% the Brain Genomics Superstruct Project (GSP) dataset, given a set of
% target traits to be predicted (y_list). It first splits the data into
% cross-validation folds, then reads in the traits and covariates, and calls
% "CBIG_KRR_workflow.m" to run the kernel ridge regression algorithm. The
% prediction accuracy using the optimal hyperparameters will be saved as
% [outdir '/final_result_' outstem '.mat'].
%
% Inputs:
%   - data_csv
%     Full path of the CSV file containing behavioral and demographic
%     information from the GSP dataset.
%
%   - subject_list
%     Full path of the subject ID list. Each line in this list corresponds
%     to one subject.
%
%   - RSFC_file
%     Full path of the resting-state functional connectivity (RSFC) matrix.
%
%   - y_list
%     Full path to a text file with all y, i.e. behavioral (or demographic)
%     measures (target measures to be predicted using kernel ridge
%     regression). Each line in this text file corresponds to one
%     behavioral name. The behavioral names should correspond to the
%     headers in "data_csv".
%
%   - covariate_list
%     A text list of covariate names (e.g. age, sex, FD) that need to be
%     regressed from y (i.e. the measures to be predicted).
%     Each line in this text file corresponds to one covariate name. The
%     covariate names stated in this list, except for 'FD' and 'DVARS',
%     should correspond to the headers in "data_csv".
%
%   - FD_file (optional)
%     If there is a need to regress FD (framewise displacement) from the
%     behavioral (or demographic) measures, the user should include 'FD' in
%     the "covariate_list". In this case, "FD_file" is the full path of the
%     mean FD of all subjects. The number of lines in "FD_file" should be
%     the same as the number of lines in "subject_list".
%     If the user does not need to regress FD from y, then the input
%     variable "FD_file" is not required and the user can pass in 'NONE' to
%     the function.
%     If "covariate_list" does not contain FD, this argument will be
%     ignored.
%
%   - DVARS_file (optional)
%     If there is a need to regress 'DVARS' from the behavioral
%     (demographic) measures, y, the user must include the covariate
%     'DVARS' (or 'DV') in the 'covariate_list'. In this case, "DVARS_file"
%     is the full path of the mean DVARS of all subjects. The number of
%     lines in "DVARS_file" should be the same as the number of lines in
%     "subject_list".
%     If the user does not need to regress DVARS from y, then the input
%     variable 'DVARS_file' is not required and the user can pass in 'NONE'
%     to the function.
%     If "covariate_list" does not contain DV (or DVARS), this argument
%     will be ignored.
%
%   - outdir
%     The full path of output directory. A subfolder
%     [outdir '/randseed_' seed] will be created to save all output files of
%     the current random seed.
%
%   - outstem
%     A string appended to the output file names to specify the output
%     files (y after regression, accuracy files, ...). For example, if
%     outstem = '58behaviors', then the accuracy files will be named as
%     <path_to_file>/acc_58behaviors.mat,
%     and the final output filename will be
%     [outdir '/randseed_' seed '/final_result_58behaviors.mat'].
%     If no outstem is required, the user can just pass in an empty string
%     ('').
%
%   - num_test_folds
%     A string or scalar, the number of training-test cross-validation
%     folds.
%
%   - num_inner_folds
%     A string or scalar.
%     To select optimal hyperparameters, each training fold will be split
%     randomly into "num_inner_folds" inner-loop cross-validation folds.
%
%   - seed
%     A string or scalar, the random seed used to split the data into
%     training-test cross-validation folds.
%
%   - with_bias (optional)
%     A scalar (choose from 0 or 1).
%     - with_bias = 0 means the algorithm is to minimize
%       (y - K*alpha)^2 + (regularization of alpha);
%     - with_bias = 1 means the algorithm is to minimize
%       (y - K*alpha - beta)^2 + (regularization of alpha), where beta is a
%       constant bias for every subject, estimated from the data.
%     If not passed in, the default is 0, meaning there will be no bias term.
%
%   - ker_param_file (optional)
%     Full path of the kernel parameter file (.mat). A structure "ker_param"
%     is assumed to be saved in this file.
%     "ker_param" is a K x 1 structure with two fields: type and scale. K
%     denotes the number of kernels.
%     ker_param(k).type is a string of the type of k-th kernel. Choose from
%       'corr'        - Pearson's correlation;
%       'Gaussian'    - Gaussian kernel;
%       'Exponential' - exponential kernel.
%     ker_param(k).scale is a scalar specifying the scale of k-th kernel
%     (for Gaussian kernel or exponential kernel). If ker_param(k).type == 'corr',
%     ker_param(k).scale = NaN.
%     If this argument is not passed in (or passed in as 'NONE'), then
%     ker_param will be set as default:
%       ker_param.type = 'corr';
%       ker_param.scale = NaN.
%
%   - lambda_set_file (optional)
%     Full path of the regularization parameter file (.mat). A vector
%     "lambda_set" is assumed to be saved in this file.
%     "lambda_set" is a vector of numbers for grid search of lambda (the
%     regularization parameter). If this file is not passed in (or passed
%     in as 'NONE'), it will be set as default:
%     [ 0 0.00001 0.0001 0.001 0.004 0.007 0.01 0.04 0.07 0.1 0.4 0.7 1 1.5 2 2.5 3 3.5 4 ...
%       5 10 15 20 30 40 50 60 70 80 100 150 200 300 500 700 1000 10000 100000 1000000]
%
%   - threshold_set_file (optional)
%     Full path of the file storing (.mat) the set of thresholds used to
%     binarize the predicted score when the original y is binary. A vector
%     "threshold_set" is assumed to be saved in this file.
%     "threshold_set" is a vector used for grid search of the optimal
%     "threshold". If this file is not passed in (or passed in as 'NONE'),
%     or "threshold_set" is 'NONE', it will be set as default:
%     [-1:0.1:1].
%
% Written by Jingwei Li and CBIG under MIT license: https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md

%% setting up
% Numeric arguments may arrive as strings when invoked from the shell;
% normalize them to doubles.
if(ischar(num_test_folds))
    num_test_folds = str2double(num_test_folds);
end

if(ischar(num_inner_folds))
    num_inner_folds = str2double(num_inner_folds);
end

if(ischar(seed))
    seed = str2double(seed);
end

% Default: no bias term in the KRR objective.
if(~exist('with_bias', 'var') || isempty(with_bias))
    with_bias = 0;
end

% Empty/'NONE' ker_param_file -> let CBIG_KRR_workflow use its default kernel.
if(~exist('ker_param_file', 'var') || isempty(ker_param_file) || ...
        strcmpi(ker_param_file, 'none'))
    ker_param_file = [];
end

% Default lambda grid shipped with the Li2019_GSR project.
if(~exist('lambda_set_file', 'var') || isempty(lambda_set_file) || ...
        strcmpi(lambda_set_file, 'none'))
    lambda_set_file = fullfile(getenv('CBIG_CODE_DIR'), 'stable_projects', ...
        'preprocessing', 'Li2019_GSR', 'KernelRidgeRegression', 'lambda_set.mat');
end

if(~exist('threshold_set_file', 'var') || isempty(threshold_set_file) || ...
        strcmpi(threshold_set_file, 'none'))
    threshold_set_file = [];
end

%% Data split
fprintf('[GSP workflow]: split families into %d folds.\n', num_test_folds);
CBIG_cross_validation_data_split( subject_list, 'NONE', 'NONE', ...
    'NONE', num_test_folds, seed, fullfile(outdir, ['randseed_' num2str(seed)]), ',' );

%% Read y
% y types: 'Sex' is treated as categorical; every other measure as continuous.
fprintf('[GSP workflow]: read the measures to be predicted.\n')
[y_names, num_y] = CBIG_text2cell(y_list);
for i = 1:num_y
    if(strcmp(y_names{i}, 'Sex'))
        y_types{i} = 'categorical';
    else
        y_types{i} = 'continuous';
    end
end
% Special case: 'avg_MenRot_non0_CORRpc' is not a CSV column; it is derived
% by averaging the three MenRot condition columns. Expand it here and
% average the loaded columns back into one below.
MenTot_flag = 0;
if(ismember({'avg_MenRot_non0_CORRpc'}, y_names))
    MenTot_flag = 1;
    idx = find(strcmp(y_names,'avg_MenRot_non0_CORRpc')==1);
    y_names = [y_names(1:idx-1) {'MenRot_80_CORRpc' 'MenRot_120_CORRpc' 'MenRot_160_CORRpc'} ...
        y_names(idx+1:end)];
    y_types = [y_types(1:idx-1) {'continuous' 'continuous' 'continuous'} y_types(idx+1:end)];
end
ystem = outstem;
if(~isempty(outstem))
    ystem = ['_' outstem];
end
% Skip the (slow) CSV read if the y file was produced by a previous run.
if(~exist(fullfile(outdir, ['y' ystem '.mat']), 'file'))
    CBIG_read_y_from_csv( {data_csv}, 'Subject_ID', y_names, y_types, ...
        subject_list, fullfile(outdir, ['y' ystem '.mat']), ',' );
    if(MenTot_flag == 1)
        % Collapse the three MenRot columns into their mean at position idx.
        y_tmp = load(fullfile(outdir, ['y' ystem '.mat']));
        y_tmp.y(:,idx)=mean(y_tmp.y(:,idx:idx+2),2);
        y_tmp.y(:,idx+1:idx+2)=[];
        save(fullfile(outdir, ['y' ystem '.mat']), '-struct' ,'y_tmp')
    end
end

%% Read covariates
% covariate types: 'Sex' and 'Race_Ethn' are categorical; the rest continuous.
fprintf('[GSP workflow]: read covariates to be regressed from the measures.\n')
[cov_names, num_cov] = CBIG_text2cell(covariate_list);
for i = 1:num_cov
    if(strcmp(cov_names{i}, 'Sex') || strcmp(cov_names{i}, 'Race_Ethn'))
        cov_types{i} = 'categorical';
    else
        cov_types{i} = 'continuous';
    end
end
cov_stem = outstem;
if(~isempty(outstem))
    cov_stem = ['_' outstem];
end
if(~exist(fullfile(outdir, ['covariates' cov_stem '.mat']), 'file'))
    CBIG_generate_covariates_from_csv( {data_csv}, ...
        'Subject_ID', cov_names, cov_types, subject_list, FD_file, DVARS_file, ...
        fullfile(outdir, ['covariates' cov_stem '.mat']), ',' );
end

%% Call kernel regression workflow utility function
fprintf('[GSP workflow]: call kernel regression workflow ...\n')
sub_fold_file = fullfile(outdir, ['randseed_' num2str(seed)], ...
    ['no_relative_' num2str(num_test_folds) '_fold_sub_list.mat']);
CBIG_KRR_workflow( '', 0, sub_fold_file, fullfile(outdir, ['y' ystem '.mat']), ...
    fullfile(outdir, ['covariates' cov_stem '.mat']), RSFC_file, num_inner_folds, ...
    fullfile(outdir, ['randseed_' num2str(seed)]), outstem, 'with_bias',with_bias,...
    'ker_param_file', ker_param_file, 'lambda_set_file',lambda_set_file,...
    'threshold_set_file',threshold_set_file,'metric', 'corr');

end
namespace DSInternals.DataStore
{
    /// <summary>
    /// Domain, forest or DC functional level.
    /// </summary>
    /// <remarks>
    /// We do not want to be dependent on System.DirectoryServices.ActiveDirectory, so we implement our own enum.
    /// The numeric values correspond to the msDS-Behavior-Version attribute values defined in MS-ADTS.
    /// </remarks>
    /// <see>https://msdn.microsoft.com/en-us/library/cc223743.aspx</see>
    public enum FunctionalLevel : int
    {
        /// <summary>Windows 2000.</summary>
        Win2000 = 0,

        /// <summary>Windows Server 2003 mixed/interim.</summary>
        Win2003Mixed = 1,

        /// <summary>Windows Server 2003.</summary>
        Win2003 = 2,

        /// <summary>Windows Server 2008.</summary>
        Win2008 = 3,

        /// <summary>Windows Server 2008 R2.</summary>
        Win2008R2 = 4,

        /// <summary>Windows Server 2012.</summary>
        Win2012 = 5,

        /// <summary>Windows Server 2012 R2.</summary>
        Win2012R2 = 6,

        /// <summary>Windows Server 2016 (pre-release codename "Threshold").</summary>
        WinThreshold = 7
    }
}
#import "CPTTradingRangePlot.h" #import "CPTColor.h" #import "CPTExceptions.h" #import "CPTLegend.h" #import "CPTLineStyle.h" #import "CPTMutableNumericData.h" #import "CPTPlotArea.h" #import "CPTPlotRange.h" #import "CPTPlotSpace.h" #import "CPTPlotSpaceAnnotation.h" #import "CPTUtilities.h" #import "CPTXYPlotSpace.h" #import "NSCoderExtensions.h" #import "tgmath.h" /** @defgroup plotAnimationTradingRangePlot Trading Range Plot * @brief Trading range plot properties that can be animated using Core Animation. * @ingroup plotAnimation **/ /** @if MacOnly * @defgroup plotBindingsTradingRangePlot Trading Range Plot Bindings * @brief Binding identifiers for trading range plots. * @ingroup plotBindings * @endif **/ NSString *const CPTTradingRangePlotBindingXValues = @"xValues"; ///< X values. NSString *const CPTTradingRangePlotBindingOpenValues = @"openValues"; ///< Open price values. NSString *const CPTTradingRangePlotBindingHighValues = @"highValues"; ///< High price values. NSString *const CPTTradingRangePlotBindingLowValues = @"lowValues"; ///< Low price values. NSString *const CPTTradingRangePlotBindingCloseValues = @"closeValues"; ///< Close price values. NSString *const CPTTradingRangePlotBindingIncreaseFills = @"increaseFills"; ///< Fills used with a candlestick plot when close >= open. NSString *const CPTTradingRangePlotBindingDecreaseFills = @"decreaseFills"; ///< Fills used with a candlestick plot when close < open. NSString *const CPTTradingRangePlotBindingLineStyles = @"lineStyles"; ///< Line styles used to draw candlestick or OHLC symbols. NSString *const CPTTradingRangePlotBindingIncreaseLineStyles = @"increaseLineStyles"; ///< Line styles used to outline candlestick symbols when close >= open. NSString *const CPTTradingRangePlotBindingDecreaseLineStyles = @"decreaseLineStyles"; ///< Line styles used to outline candlestick symbols when close < open. 
// The x axis carries the independent coordinate (record location); the
// dependent coordinate (price values) is plotted on y.
static const CPTCoordinate independentCoord = CPTCoordinateX;
static const CPTCoordinate dependentCoord   = CPTCoordinateY;

/// @cond

// Private API: cached per-record data arrays, per-record fill/line-style
// arrays, and the symbol drawing/lookup helpers.
@interface CPTTradingRangePlot()

@property (nonatomic, readwrite, copy, nullable) CPTMutableNumericData *xValues;
@property (nonatomic, readwrite, copy, nullable) CPTMutableNumericData *openValues;
@property (nonatomic, readwrite, copy, nullable) CPTMutableNumericData *highValues;
@property (nonatomic, readwrite, copy, nullable) CPTMutableNumericData *lowValues;
@property (nonatomic, readwrite, copy, nullable) CPTMutableNumericData *closeValues;
@property (nonatomic, readwrite, copy, nullable) CPTFillArray *increaseFills;
@property (nonatomic, readwrite, copy, nullable) CPTFillArray *decreaseFills;
@property (nonatomic, readwrite, copy, nullable) CPTLineStyleArray *lineStyles;
@property (nonatomic, readwrite, copy, nullable) CPTLineStyleArray *increaseLineStyles;
@property (nonatomic, readwrite, copy, nullable) CPTLineStyleArray *decreaseLineStyles;

// Record index captured on the pointing-device down event (NSNotFound when none).
@property (nonatomic, readwrite, assign) NSUInteger pointingDeviceDownIndex;

-(void)drawCandleStickInContext:(nonnull CGContextRef)context atIndex:(NSUInteger)idx x:(CGFloat)x open:(CGFloat)openValue close:(CGFloat)closeValue high:(CGFloat)highValue low:(CGFloat)lowValue alignPoints:(BOOL)alignPoints;
-(void)drawOHLCInContext:(nonnull CGContextRef)context atIndex:(NSUInteger)idx x:(CGFloat)x open:(CGFloat)openValue close:(CGFloat)closeValue high:(CGFloat)highValue low:(CGFloat)lowValue alignPoints:(BOOL)alignPoints;

-(nullable CPTFill *)increaseFillForIndex:(NSUInteger)idx;
-(nullable CPTFill *)decreaseFillForIndex:(NSUInteger)idx;

-(nullable CPTLineStyle *)lineStyleForIndex:(NSUInteger)idx;
-(nullable CPTLineStyle *)increaseLineStyleForIndex:(NSUInteger)idx;
-(nullable CPTLineStyle *)decreaseLineStyleForIndex:(NSUInteger)idx;

@end

/// @endcond

#pragma mark -

/**
 * @brief A trading range financial plot.
 * @see See @ref plotAnimationTradingRangePlot "Trading Range Plot" for a list of animatable properties.
 * @if MacOnly
 * @see See @ref plotBindingsTradingRangePlot "Trading Range Plot Bindings" for a list of supported binding identifiers.
 * @endif
 **/
@implementation CPTTradingRangePlot

// Data and per-record style arrays are backed by the plot's data cache.
@dynamic xValues;
@dynamic openValues;
@dynamic highValues;
@dynamic lowValues;
@dynamic closeValues;
@dynamic increaseFills;
@dynamic decreaseFills;
@dynamic lineStyles;
@dynamic increaseLineStyles;
@dynamic decreaseLineStyles;

/** @property nullable CPTLineStyle *lineStyle
 *  @brief The line style used to draw candlestick or OHLC symbols.
 **/
@synthesize lineStyle;

/** @property nullable CPTLineStyle *increaseLineStyle
 *  @brief The line style used to outline candlestick symbols or draw OHLC symbols when close >= open.
 *  If @nil, will use @ref lineStyle instead.
 **/
@synthesize increaseLineStyle;

/** @property nullable CPTLineStyle *decreaseLineStyle
 *  @brief The line style used to outline candlestick symbols or draw OHLC symbols when close < open.
 *  If @nil, will use @ref lineStyle instead.
 **/
@synthesize decreaseLineStyle;

/** @property nullable CPTFill *increaseFill
 *  @brief The fill used with a candlestick plot when close >= open.
 **/
@synthesize increaseFill;

/** @property nullable CPTFill *decreaseFill
 *  @brief The fill used with a candlestick plot when close < open.
 **/
@synthesize decreaseFill;

/** @property CPTTradingRangePlotStyle plotStyle
 *  @brief The style of trading range plot drawn. The default is #CPTTradingRangePlotStyleOHLC.
 **/
@synthesize plotStyle;

/** @property CGFloat barWidth
 *  @brief The width of bars in candlestick plots (view coordinates).
 *  @ingroup plotAnimationTradingRangePlot
 **/
@synthesize barWidth;

/** @property CGFloat stickLength
 *  @brief The length of close and open sticks on OHLC plots (view coordinates).
 *  @ingroup plotAnimationTradingRangePlot
 **/
@synthesize stickLength;

/** @property CGFloat barCornerRadius
 *  @brief The corner radius used for candlestick plots.
 *  Defaults to @num{0.0}.
 *  @ingroup plotAnimationTradingRangePlot
 **/
@synthesize barCornerRadius;

/** @property BOOL showBarBorder
 *  @brief If @YES, the candlestick body will show a border.
 *  @ingroup plotAnimationTradingRangePlot
 **/
@synthesize showBarBorder;

/** @internal
 *  @property NSUInteger pointingDeviceDownIndex
 *  @brief The index that was selected on the pointing device down event.
 **/
@synthesize pointingDeviceDownIndex;

#pragma mark -
#pragma mark Init/Dealloc

/// @cond

// Cocoa Bindings are not available on iOS or the simulator, so binding
// identifiers are only exposed in the macOS build.
#if TARGET_OS_SIMULATOR || TARGET_OS_IPHONE
#else
+(void)initialize
{
    if ( self == [CPTTradingRangePlot class] ) {
        [self exposeBinding:CPTTradingRangePlotBindingXValues];
        [self exposeBinding:CPTTradingRangePlotBindingOpenValues];
        [self exposeBinding:CPTTradingRangePlotBindingHighValues];
        [self exposeBinding:CPTTradingRangePlotBindingLowValues];
        [self exposeBinding:CPTTradingRangePlotBindingCloseValues];
        [self exposeBinding:CPTTradingRangePlotBindingIncreaseFills];
        [self exposeBinding:CPTTradingRangePlotBindingDecreaseFills];
        [self exposeBinding:CPTTradingRangePlotBindingLineStyles];
        [self exposeBinding:CPTTradingRangePlotBindingIncreaseLineStyles];
        [self exposeBinding:CPTTradingRangePlotBindingDecreaseLineStyles];
    }
}
#endif

/// @endcond

/// @name Initialization
/// @{

/** @brief Initializes a newly allocated CPTTradingRangePlot object with the provided frame rectangle.
 *
 *  This is the designated initializer.
 The initialized layer will have the following properties:
 *  - @ref plotStyle = #CPTTradingRangePlotStyleOHLC
 *  - @ref lineStyle = default line style
 *  - @ref increaseLineStyle = @nil
 *  - @ref decreaseLineStyle = @nil
 *  - @ref increaseFill = solid white fill
 *  - @ref decreaseFill = solid black fill
 *  - @ref barWidth = @num{5.0}
 *  - @ref stickLength = @num{3.0}
 *  - @ref barCornerRadius = @num{0.0}
 *  - @ref showBarBorder = @YES
 *  - @ref labelField = #CPTTradingRangePlotFieldClose
 *
 *  @param newFrame The frame rectangle.
 *  @return The initialized CPTTradingRangePlot object.
 **/
-(nonnull instancetype)initWithFrame:(CGRect)newFrame
{
    if ( (self = [super initWithFrame:newFrame]) ) {
        plotStyle               = CPTTradingRangePlotStyleOHLC;
        lineStyle               = [[CPTLineStyle alloc] init];
        increaseLineStyle       = nil;
        decreaseLineStyle       = nil;
        increaseFill            = [[CPTFill alloc] initWithColor:[CPTColor whiteColor]];
        decreaseFill            = [[CPTFill alloc] initWithColor:[CPTColor blackColor]];
        barWidth                = CPTFloat(5.0);
        stickLength             = CPTFloat(3.0);
        barCornerRadius         = CPTFloat(0.0);
        showBarBorder           = YES;
        pointingDeviceDownIndex = NSNotFound;

        self.labelField = CPTTradingRangePlotFieldClose;
    }
    return self;
}

/// @}

/// @cond

// Core Animation presentation-layer copy: shallow-copies all ivars from the
// model layer. pointingDeviceDownIndex is deliberately reset, not copied.
-(nonnull instancetype)initWithLayer:(nonnull id)layer
{
    if ( (self = [super initWithLayer:layer]) ) {
        CPTTradingRangePlot *theLayer = (CPTTradingRangePlot *)layer;

        plotStyle               = theLayer->plotStyle;
        lineStyle               = theLayer->lineStyle;
        increaseLineStyle       = theLayer->increaseLineStyle;
        decreaseLineStyle       = theLayer->decreaseLineStyle;
        increaseFill            = theLayer->increaseFill;
        decreaseFill            = theLayer->decreaseFill;
        barWidth                = theLayer->barWidth;
        stickLength             = theLayer->stickLength;
        barCornerRadius         = theLayer->barCornerRadius;
        showBarBorder           = theLayer->showBarBorder;
        pointingDeviceDownIndex = NSNotFound;
    }
    return self;
}

/// @endcond

#pragma mark -
#pragma mark NSCoding Methods

/// @cond

-(void)encodeWithCoder:(nonnull NSCoder *)coder
{
    [super encodeWithCoder:coder];

    [coder encodeObject:self.lineStyle forKey:@"CPTTradingRangePlot.lineStyle"];
    [coder encodeObject:self.increaseLineStyle forKey:@"CPTTradingRangePlot.increaseLineStyle"];
    [coder encodeObject:self.decreaseLineStyle forKey:@"CPTTradingRangePlot.decreaseLineStyle"];
    [coder encodeObject:self.increaseFill forKey:@"CPTTradingRangePlot.increaseFill"];
    [coder encodeObject:self.decreaseFill forKey:@"CPTTradingRangePlot.decreaseFill"];
    [coder encodeInteger:self.plotStyle forKey:@"CPTTradingRangePlot.plotStyle"];
    [coder encodeCGFloat:self.barWidth forKey:@"CPTTradingRangePlot.barWidth"];
    [coder encodeCGFloat:self.stickLength forKey:@"CPTTradingRangePlot.stickLength"];
    [coder encodeCGFloat:self.barCornerRadius forKey:@"CPTTradingRangePlot.barCornerRadius"];
    [coder encodeBool:self.showBarBorder forKey:@"CPTTradingRangePlot.showBarBorder"];

    // No need to archive these properties:
    // pointingDeviceDownIndex
}

// Copies on decode keep the deserialized styles/fills independent of any
// mutable originals.
-(nullable instancetype)initWithCoder:(nonnull NSCoder *)coder
{
    if ( (self = [super initWithCoder:coder]) ) {
        lineStyle         = [[coder decodeObjectOfClass:[CPTLineStyle class] forKey:@"CPTTradingRangePlot.lineStyle"] copy];
        increaseLineStyle = [[coder decodeObjectOfClass:[CPTLineStyle class] forKey:@"CPTTradingRangePlot.increaseLineStyle"] copy];
        decreaseLineStyle = [[coder decodeObjectOfClass:[CPTLineStyle class] forKey:@"CPTTradingRangePlot.decreaseLineStyle"] copy];
        increaseFill      = [[coder decodeObjectOfClass:[CPTFill class] forKey:@"CPTTradingRangePlot.increaseFill"] copy];
        decreaseFill      = [[coder decodeObjectOfClass:[CPTFill class] forKey:@"CPTTradingRangePlot.decreaseFill"] copy];
        plotStyle         = (CPTTradingRangePlotStyle)[coder decodeIntegerForKey:@"CPTTradingRangePlot.plotStyle"];
        barWidth          = [coder decodeCGFloatForKey:@"CPTTradingRangePlot.barWidth"];
        stickLength       = [coder decodeCGFloatForKey:@"CPTTradingRangePlot.stickLength"];
        barCornerRadius   = [coder decodeCGFloatForKey:@"CPTTradingRangePlot.barCornerRadius"];
        showBarBorder     = [coder decodeBoolForKey:@"CPTTradingRangePlot.showBarBorder"];

        pointingDeviceDownIndex = NSNotFound;
    }
    return self;
}

/// @endcond

#pragma mark -
#pragma mark NSSecureCoding Methods

/// @cond

+(BOOL)supportsSecureCoding
{
    return YES;
}

/// @endcond

#pragma mark -
#pragma mark Data Loading

/// @cond

-(void)reloadDataInIndexRange:(NSRange)indexRange
{
    [super reloadDataInIndexRange:indexRange];

    // Fills
    [self reloadBarFillsInIndexRange:indexRange];

    // Line styles
    [self reloadBarLineStylesInIndexRange:indexRange];
}

-(void)reloadPlotDataInIndexRange:(NSRange)indexRange
{
    [super reloadPlotDataInIndexRange:indexRange];

    // Fall back to per-field loading only if the bulk loader did not handle it.
    if ( ![self loadNumbersForAllFieldsFromDataSourceInRecordIndexRange:indexRange] ) {
        id<CPTTradingRangePlotDataSource> theDataSource = (id<CPTTradingRangePlotDataSource>)self.dataSource;

        if ( theDataSource ) {
            id newXValues = [self numbersFromDataSourceForField:CPTTradingRangePlotFieldX recordIndexRange:indexRange];
            [self cacheNumbers:newXValues forField:CPTTradingRangePlotFieldX atRecordIndex:indexRange.location];
            id newOpenValues = [self numbersFromDataSourceForField:CPTTradingRangePlotFieldOpen recordIndexRange:indexRange];
            [self cacheNumbers:newOpenValues forField:CPTTradingRangePlotFieldOpen atRecordIndex:indexRange.location];
            id newHighValues = [self numbersFromDataSourceForField:CPTTradingRangePlotFieldHigh recordIndexRange:indexRange];
            [self cacheNumbers:newHighValues forField:CPTTradingRangePlotFieldHigh atRecordIndex:indexRange.location];
            id newLowValues = [self numbersFromDataSourceForField:CPTTradingRangePlotFieldLow recordIndexRange:indexRange];
            [self cacheNumbers:newLowValues forField:CPTTradingRangePlotFieldLow atRecordIndex:indexRange.location];
            id newCloseValues = [self numbersFromDataSourceForField:CPTTradingRangePlotFieldClose recordIndexRange:indexRange];
            [self cacheNumbers:newCloseValues forField:CPTTradingRangePlotFieldClose atRecordIndex:indexRange.location];
        }
        else {
            self.xValues     = nil;
            self.openValues  = nil;
            self.highValues  = nil;
            self.lowValues   = nil;
            self.closeValues = nil;
        }
    }
}

/// @endcond
/**
 *  @brief Reload all bar fills from the data source immediately.
 **/
-(void)reloadBarFills
{
    [self reloadBarFillsInIndexRange:NSMakeRange(0, self.cachedDataCount)];
}

/** @brief Reload bar fills in the given index range from the data source immediately.
 *  @param indexRange The index range to load.
 **/
-(void)reloadBarFillsInIndexRange:(NSRange)indexRange
{
    id<CPTTradingRangePlotDataSource> theDataSource = (id<CPTTradingRangePlotDataSource>)self.dataSource;

    BOOL needsLegendUpdate = NO;

    // Increase fills: prefer the bulk (range) data source method over the
    // per-record one.
    if ( [theDataSource respondsToSelector:@selector(increaseFillsForTradingRangePlot:recordIndexRange:)] ) {
        needsLegendUpdate = YES;

        [self cacheArray:[theDataSource increaseFillsForTradingRangePlot:self recordIndexRange:indexRange]
                  forKey:CPTTradingRangePlotBindingIncreaseFills
           atRecordIndex:indexRange.location];
    }
    else if ( [theDataSource respondsToSelector:@selector(increaseFillForTradingRangePlot:recordIndex:)] ) {
        needsLegendUpdate = YES;

        // [CPTPlot nilData] is a placeholder object standing in for nil,
        // since NSArray cannot store nil directly.
        id nilObject               = [CPTPlot nilData];
        CPTMutableFillArray *array = [[NSMutableArray alloc] initWithCapacity:indexRange.length];
        NSUInteger maxIndex        = NSMaxRange(indexRange);

        for ( NSUInteger idx = indexRange.location; idx < maxIndex; idx++ ) {
            CPTFill *dataSourceFill = [theDataSource increaseFillForTradingRangePlot:self recordIndex:idx];
            if ( dataSourceFill ) {
                [array addObject:dataSourceFill];
            }
            else {
                [array addObject:nilObject];
            }
        }

        [self cacheArray:array forKey:CPTTradingRangePlotBindingIncreaseFills atRecordIndex:indexRange.location];
    }

    // Decrease fills: same bulk-then-per-record lookup.
    if ( [theDataSource respondsToSelector:@selector(decreaseFillsForTradingRangePlot:recordIndexRange:)] ) {
        needsLegendUpdate = YES;

        [self cacheArray:[theDataSource decreaseFillsForTradingRangePlot:self recordIndexRange:indexRange]
                  forKey:CPTTradingRangePlotBindingDecreaseFills
           atRecordIndex:indexRange.location];
    }
    else if ( [theDataSource respondsToSelector:@selector(decreaseFillForTradingRangePlot:recordIndex:)] ) {
        needsLegendUpdate = YES;

        id nilObject               = [CPTPlot nilData];
        CPTMutableFillArray *array = [[NSMutableArray alloc] initWithCapacity:indexRange.length];
        NSUInteger maxIndex        = NSMaxRange(indexRange);

        for ( NSUInteger idx = indexRange.location; idx < maxIndex; idx++ ) {
            CPTFill *dataSourceFill = [theDataSource decreaseFillForTradingRangePlot:self recordIndex:idx];
            if ( dataSourceFill ) {
                [array addObject:dataSourceFill];
            }
            else {
                [array addObject:nilObject];
            }
        }

        [self cacheArray:array forKey:CPTTradingRangePlotBindingDecreaseFills atRecordIndex:indexRange.location];
    }

    // Legend: custom fills may change the legend swatches.
    if ( needsLegendUpdate ) {
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }

    [self setNeedsDisplay];
}

/**
 *  @brief Reload all bar line styles from the data source immediately.
 **/
-(void)reloadBarLineStyles
{
    [self reloadBarLineStylesInIndexRange:NSMakeRange(0, self.cachedDataCount)];
}

/** @brief Reload bar line styles in the given index range from the data source immediately.
 *  @param indexRange The index range to load.
**/ -(void)reloadBarLineStylesInIndexRange:(NSRange)indexRange { id<CPTTradingRangePlotDataSource> theDataSource = (id<CPTTradingRangePlotDataSource>)self.dataSource; BOOL needsLegendUpdate = NO; // Line style if ( [theDataSource respondsToSelector:@selector(lineStylesForTradingRangePlot:recordIndexRange:)] ) { needsLegendUpdate = YES; [self cacheArray:[theDataSource lineStylesForTradingRangePlot:self recordIndexRange:indexRange] forKey:CPTTradingRangePlotBindingLineStyles atRecordIndex:indexRange.location]; } else if ( [theDataSource respondsToSelector:@selector(lineStyleForTradingRangePlot:recordIndex:)] ) { needsLegendUpdate = YES; id nilObject = [CPTPlot nilData]; CPTMutableLineStyleArray *array = [[NSMutableArray alloc] initWithCapacity:indexRange.length]; NSUInteger maxIndex = NSMaxRange(indexRange); for ( NSUInteger idx = indexRange.location; idx < maxIndex; idx++ ) { CPTLineStyle *dataSourceLineStyle = [theDataSource lineStyleForTradingRangePlot:self recordIndex:idx]; if ( dataSourceLineStyle ) { [array addObject:dataSourceLineStyle]; } else { [array addObject:nilObject]; } } [self cacheArray:array forKey:CPTTradingRangePlotBindingLineStyles atRecordIndex:indexRange.location]; } // Increase line style if ( [theDataSource respondsToSelector:@selector(increaseLineStylesForTradingRangePlot:recordIndexRange:)] ) { needsLegendUpdate = YES; [self cacheArray:[theDataSource increaseLineStylesForTradingRangePlot:self recordIndexRange:indexRange] forKey:CPTTradingRangePlotBindingIncreaseLineStyles atRecordIndex:indexRange.location]; } else if ( [theDataSource respondsToSelector:@selector(increaseLineStyleForTradingRangePlot:recordIndex:)] ) { needsLegendUpdate = YES; id nilObject = [CPTPlot nilData]; CPTMutableLineStyleArray *array = [[NSMutableArray alloc] initWithCapacity:indexRange.length]; NSUInteger maxIndex = NSMaxRange(indexRange); for ( NSUInteger idx = indexRange.location; idx < maxIndex; idx++ ) { CPTLineStyle *dataSourceLineStyle = [theDataSource 
increaseLineStyleForTradingRangePlot:self recordIndex:idx]; if ( dataSourceLineStyle ) { [array addObject:dataSourceLineStyle]; } else { [array addObject:nilObject]; } } [self cacheArray:array forKey:CPTTradingRangePlotBindingIncreaseLineStyles atRecordIndex:indexRange.location]; } // Decrease line styles if ( [theDataSource respondsToSelector:@selector(decreaseLineStylesForTradingRangePlot:recordIndexRange:)] ) { needsLegendUpdate = YES; [self cacheArray:[theDataSource decreaseLineStylesForTradingRangePlot:self recordIndexRange:indexRange] forKey:CPTTradingRangePlotBindingDecreaseLineStyles atRecordIndex:indexRange.location]; } else if ( [theDataSource respondsToSelector:@selector(decreaseLineStyleForTradingRangePlot:recordIndex:)] ) { needsLegendUpdate = YES; id nilObject = [CPTPlot nilData]; CPTMutableLineStyleArray *array = [[NSMutableArray alloc] initWithCapacity:indexRange.length]; NSUInteger maxIndex = NSMaxRange(indexRange); for ( NSUInteger idx = indexRange.location; idx < maxIndex; idx++ ) { CPTLineStyle *dataSourceLineStyle = [theDataSource decreaseLineStyleForTradingRangePlot:self recordIndex:idx]; if ( dataSourceLineStyle ) { [array addObject:dataSourceLineStyle]; } else { [array addObject:nilObject]; } } [self cacheArray:array forKey:CPTTradingRangePlotBindingDecreaseLineStyles atRecordIndex:indexRange.location]; } // Legend if ( needsLegendUpdate ) { [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self]; } [self setNeedsDisplay]; } #pragma mark - #pragma mark Drawing /// @cond -(void)renderAsVectorInContext:(nonnull CGContextRef)context { if ( self.hidden ) { return; } CPTMutableNumericData *locations = [self cachedNumbersForField:CPTTradingRangePlotFieldX]; CPTMutableNumericData *opens = [self cachedNumbersForField:CPTTradingRangePlotFieldOpen]; CPTMutableNumericData *highs = [self cachedNumbersForField:CPTTradingRangePlotFieldHigh]; CPTMutableNumericData *lows = [self 
cachedNumbersForField:CPTTradingRangePlotFieldLow];
    CPTMutableNumericData *closes = [self cachedNumbersForField:CPTTradingRangePlotFieldClose];

    NSUInteger sampleCount = locations.numberOfSamples;

    // Nothing to draw if there are no samples or any field is missing.
    if ( sampleCount == 0 ) {
        return;
    }
    if ( (opens == nil) || (highs == nil) || (lows == nil) || (closes == nil) ) {
        return;
    }

    // All five fields must supply the same number of samples.
    if ( (opens.numberOfSamples != sampleCount) || (highs.numberOfSamples != sampleCount) || (lows.numberOfSamples != sampleCount) || (closes.numberOfSamples != sampleCount) ) {
        [NSException raise:CPTException format:@"Mismatching number of data values in trading range plot"];
    }

    [super renderAsVectorInContext:context];

    CGPoint openPoint, highPoint, lowPoint, closePoint;

    CPTPlotSpace *thePlotSpace = self.plotSpace;
    CPTTradingRangePlotStyle thePlotStyle = self.plotStyle;
    BOOL alignPoints = self.alignsPointsToPixels;

    CGContextBeginTransparencyLayer(context, NULL);

    if ( self.doublePrecisionCache ) {
        // Fast path: cached data is raw doubles.
        const double *locationBytes = (const double *)locations.data.bytes;
        const double *openBytes = (const double *)opens.data.bytes;
        const double *highBytes = (const double *)highs.data.bytes;
        const double *lowBytes = (const double *)lows.data.bytes;
        const double *closeBytes = (const double *)closes.data.bytes;

        for ( NSUInteger i = 0; i < sampleCount; i++ ) {
            double plotPoint[2];
            plotPoint[independentCoord] = *locationBytes++;
            if ( isnan(plotPoint[independentCoord]) ) {
                // Keep the four value pointers in step with the location pointer.
                openBytes++;
                highBytes++;
                lowBytes++;
                closeBytes++;
                continue;
            }

            // open point
            plotPoint[dependentCoord] = *openBytes++;
            if ( isnan(plotPoint[dependentCoord]) ) {
                openPoint = CPTPointMake(NAN, NAN);
            }
            else {
                openPoint = [thePlotSpace plotAreaViewPointForDoublePrecisionPlotPoint:plotPoint numberOfCoordinates:2];
            }

            // high point
            plotPoint[dependentCoord] = *highBytes++;
            if ( isnan(plotPoint[dependentCoord]) ) {
                highPoint = CPTPointMake(NAN, NAN);
            }
            else {
                highPoint = [thePlotSpace plotAreaViewPointForDoublePrecisionPlotPoint:plotPoint numberOfCoordinates:2];
            }

            // low point
            plotPoint[dependentCoord] = *lowBytes++;
            if ( isnan(plotPoint[dependentCoord]) ) {
                lowPoint = CPTPointMake(NAN, NAN);
            }
            else {
                lowPoint = [thePlotSpace plotAreaViewPointForDoublePrecisionPlotPoint:plotPoint numberOfCoordinates:2];
            }

            // close point
            plotPoint[dependentCoord] = *closeBytes++;
            if ( isnan(plotPoint[dependentCoord]) ) {
                closePoint = CPTPointMake(NAN, NAN);
            }
            else {
                closePoint = [thePlotSpace plotAreaViewPointForDoublePrecisionPlotPoint:plotPoint numberOfCoordinates:2];
            }

            // Use the first non-NaN view x-coordinate among open/high/low/close.
            // BUG FIX: the original used an else-if chain that re-tested the same
            // isnan(xCoord) condition, making the low/close fallbacks unreachable.
            // Successive ifs let the cascade actually try each fallback in turn.
            CGFloat xCoord = openPoint.x;
            if ( isnan(xCoord) ) {
                xCoord = highPoint.x;
            }
            if ( isnan(xCoord) ) {
                xCoord = lowPoint.x;
            }
            if ( isnan(xCoord) ) {
                xCoord = closePoint.x;
            }

            if ( !isnan(xCoord) ) {
                // Draw
                switch ( thePlotStyle ) {
                    case CPTTradingRangePlotStyleOHLC:
                        [self drawOHLCInContext:context
                                        atIndex:i
                                              x:xCoord
                                           open:openPoint.y
                                          close:closePoint.y
                                           high:highPoint.y
                                            low:lowPoint.y
                                    alignPoints:alignPoints];
                        break;

                    case CPTTradingRangePlotStyleCandleStick:
                        [self drawCandleStickInContext:context
                                               atIndex:i
                                                     x:xCoord
                                                  open:openPoint.y
                                                 close:closePoint.y
                                                  high:highPoint.y
                                                   low:lowPoint.y
                                           alignPoints:alignPoints];
                        break;
                }
            }
        }
    }
    else {
        // Slow path: cached data is NSDecimal values.
        const NSDecimal *locationBytes = (const NSDecimal *)locations.data.bytes;
        const NSDecimal *openBytes = (const NSDecimal *)opens.data.bytes;
        const NSDecimal *highBytes = (const NSDecimal *)highs.data.bytes;
        const NSDecimal *lowBytes = (const NSDecimal *)lows.data.bytes;
        const NSDecimal *closeBytes = (const NSDecimal *)closes.data.bytes;

        for ( NSUInteger i = 0; i < sampleCount; i++ ) {
            NSDecimal plotPoint[2];
            plotPoint[independentCoord] = *locationBytes++;
            if ( NSDecimalIsNotANumber(&plotPoint[independentCoord]) ) {
                // Keep the four value pointers in step with the location pointer.
                openBytes++;
                highBytes++;
                lowBytes++;
                closeBytes++;
                continue;
            }

            // open point
            plotPoint[dependentCoord] = *openBytes++;
            if ( NSDecimalIsNotANumber(&plotPoint[dependentCoord]) ) {
                openPoint = CPTPointMake(NAN, NAN);
            }
            else {
                openPoint = [thePlotSpace plotAreaViewPointForPlotPoint:plotPoint numberOfCoordinates:2];
            }

            // high point
            plotPoint[dependentCoord] = *highBytes++;
            if ( NSDecimalIsNotANumber(&plotPoint[dependentCoord]) ) {
                highPoint = CPTPointMake(NAN, NAN);
            }
            else {
                highPoint = [thePlotSpace plotAreaViewPointForPlotPoint:plotPoint numberOfCoordinates:2];
            }

            // low point
            plotPoint[dependentCoord] = *lowBytes++;
            if ( NSDecimalIsNotANumber(&plotPoint[dependentCoord]) ) {
                lowPoint = CPTPointMake(NAN, NAN);
            }
            else {
                lowPoint = [thePlotSpace plotAreaViewPointForPlotPoint:plotPoint numberOfCoordinates:2];
            }

            // close point
            plotPoint[dependentCoord] = *closeBytes++;
            if ( NSDecimalIsNotANumber(&plotPoint[dependentCoord]) ) {
                closePoint = CPTPointMake(NAN, NAN);
            }
            else {
                closePoint = [thePlotSpace plotAreaViewPointForPlotPoint:plotPoint numberOfCoordinates:2];
            }

            // BUG FIX: same unreachable else-if fallback chain as the double path
            // above — replaced with successive ifs.
            CGFloat xCoord = openPoint.x;
            if ( isnan(xCoord) ) {
                xCoord = highPoint.x;
            }
            if ( isnan(xCoord) ) {
                xCoord = lowPoint.x;
            }
            if ( isnan(xCoord) ) {
                xCoord = closePoint.x;
            }

            if ( !isnan(xCoord) ) {
                // Draw
                switch ( thePlotStyle ) {
                    case CPTTradingRangePlotStyleOHLC:
                        [self drawOHLCInContext:context
                                        atIndex:i
                                              x:xCoord
                                           open:openPoint.y
                                          close:closePoint.y
                                           high:highPoint.y
                                            low:lowPoint.y
                                    alignPoints:alignPoints];
                        break;

                    case CPTTradingRangePlotStyleCandleStick:
                        [self drawCandleStickInContext:context
                                               atIndex:i
                                                     x:xCoord
                                                  open:openPoint.y
                                                 close:closePoint.y
                                                  high:highPoint.y
                                                   low:lowPoint.y
                                           alignPoints:alignPoints];
                        break;
                }
            }
        }
    }

    CGContextEndTransparencyLayer(context);
}

// Draws one candlestick: a filled open/close body with high/low wicks.
// NaN values suppress the corresponding parts of the drawing.
-(void)drawCandleStickInContext:(nonnull CGContextRef)context
                        atIndex:(NSUInteger)idx
                              x:(CGFloat)x
                           open:(CGFloat)openValue
                          close:(CGFloat)closeValue
                           high:(CGFloat)highValue
                            low:(CGFloat)lowValue
                    alignPoints:(BOOL)alignPoints
{
    const CGFloat halfBarWidth = CPTFloat(0.5) * self.barWidth;
    CPTFill *currentBarFill = nil;
    CPTLineStyle *theBorderLineStyle = nil;

    // Pick increase/decrease styling from the open→close direction.
    if ( !isnan(openValue) && !isnan(closeValue) ) {
        if ( openValue < closeValue ) {
            theBorderLineStyle = [self increaseLineStyleForIndex:idx];
            currentBarFill = [self increaseFillForIndex:idx];
        }
        else if ( openValue > closeValue ) {
            theBorderLineStyle = [self
decreaseLineStyleForIndex:idx];
            currentBarFill = [self decreaseFillForIndex:idx];
        }
        else {
            // open == close: outline with the plain line style and fill with its color.
            theBorderLineStyle = [self lineStyleForIndex:idx];

            CPTColor *lineColor = theBorderLineStyle.lineColor;
            if ( lineColor ) {
                currentBarFill = [CPTFill fillWithColor:lineColor];
            }
        }
    }

    CPTAlignPointFunction alignmentFunction = CPTAlignPointToUserSpace;

    BOOL hasLineStyle = [theBorderLineStyle isKindOfClass:[CPTLineStyle class]];
    if ( hasLineStyle ) {
        [theBorderLineStyle setLineStyleInContext:context];

        // Integral line widths on Retina displays align to device pixels.
        CGFloat lineWidth = theBorderLineStyle.lineWidth;
        if ( ( self.contentsScale > CPTFloat(1.0) ) && (round(lineWidth) == lineWidth) ) {
            alignmentFunction = CPTAlignIntegralPointToUserSpace;
        }
    }

    // high - low only
    // With no open/close body, draw just the high-low stick.
    if ( hasLineStyle && !isnan(highValue) && !isnan(lowValue) && ( isnan(openValue) || isnan(closeValue) ) ) {
        CGPoint alignedHighPoint = CPTPointMake(x, highValue);
        CGPoint alignedLowPoint = CPTPointMake(x, lowValue);
        if ( alignPoints ) {
            alignedHighPoint = alignmentFunction(context, alignedHighPoint);
            alignedLowPoint = alignmentFunction(context, alignedLowPoint);
        }

        CGMutablePathRef path = CGPathCreateMutable();
        CGPathMoveToPoint(path, NULL, alignedHighPoint.x, alignedHighPoint.y);
        CGPathAddLineToPoint(path, NULL, alignedLowPoint.x, alignedLowPoint.y);

        CGContextBeginPath(context);
        CGContextAddPath(context, path);
        [theBorderLineStyle strokePathInContext:context];

        CGPathRelease(path);
    }

    // open-close
    if ( !isnan(openValue) && !isnan(closeValue) ) {
        if ( currentBarFill || hasLineStyle ) {
            // Corner radius is clamped to the half-width and the body height.
            CGFloat radius = MIN(self.barCornerRadius, halfBarWidth);
            radius = MIN( radius, ABS(closeValue - openValue) );

            // Body corners: right-open, right-close, mid-close, left-close, left-open.
            CGPoint alignedPoint1 = CPTPointMake(x + halfBarWidth, openValue);
            CGPoint alignedPoint2 = CPTPointMake(x + halfBarWidth, closeValue);
            CGPoint alignedPoint3 = CPTPointMake(x, closeValue);
            CGPoint alignedPoint4 = CPTPointMake(x - halfBarWidth, closeValue);
            CGPoint alignedPoint5 = CPTPointMake(x - halfBarWidth, openValue);
            if ( alignPoints ) {
                if ( hasLineStyle && self.showBarBorder ) {
                    alignedPoint1 = alignmentFunction(context, alignedPoint1);
                    alignedPoint2 = alignmentFunction(context, alignedPoint2);
                    alignedPoint3 = alignmentFunction(context, alignedPoint3);
                    alignedPoint4 = alignmentFunction(context, alignedPoint4);
                    alignedPoint5 = alignmentFunction(context, alignedPoint5);
                }
                else {
                    // No visible border: snap to whole pixels for a crisp fill edge.
                    alignedPoint1 = CPTAlignIntegralPointToUserSpace(context, alignedPoint1);
                    alignedPoint2 = CPTAlignIntegralPointToUserSpace(context, alignedPoint2);
                    alignedPoint3 = CPTAlignIntegralPointToUserSpace(context, alignedPoint3);
                    alignedPoint4 = CPTAlignIntegralPointToUserSpace(context, alignedPoint4);
                    alignedPoint5 = CPTAlignIntegralPointToUserSpace(context, alignedPoint5);
                }
            }

            if ( hasLineStyle && (openValue == closeValue) ) {
                // #285 Draw a cross with open/close values marked
                const CGFloat halfLineWidth = CPTFloat(0.5) * theBorderLineStyle.lineWidth;

                alignedPoint1.y -= halfLineWidth;
                alignedPoint2.y += halfLineWidth;
                alignedPoint3.y += halfLineWidth;
                alignedPoint4.y += halfLineWidth;
                alignedPoint5.y -= halfLineWidth;
            }

            CGMutablePathRef path = CGPathCreateMutable();
            CGPathMoveToPoint(path, NULL, alignedPoint1.x, alignedPoint1.y);
            CGPathAddArcToPoint(path, NULL, alignedPoint2.x, alignedPoint2.y, alignedPoint3.x, alignedPoint3.y, radius);
            CGPathAddArcToPoint(path, NULL, alignedPoint4.x, alignedPoint4.y, alignedPoint5.x, alignedPoint5.y, radius);
            CGPathAddLineToPoint(path, NULL, alignedPoint5.x, alignedPoint5.y);
            CGPathCloseSubpath(path);

            if ( [currentBarFill isKindOfClass:[CPTFill class]] ) {
                CGContextBeginPath(context);
                CGContextAddPath(context, path);
                [currentBarFill fillPathInContext:context];
            }

            if ( hasLineStyle ) {
                if ( !self.showBarBorder ) {
                    // Discard the body outline; a fresh path holds only the wicks.
                    CGPathRelease(path);
                    path = CGPathCreateMutable();
                }

                // Lower wick, only if it extends below the body.
                if ( !isnan(lowValue) ) {
                    if ( lowValue < MIN(openValue, closeValue) ) {
                        CGPoint alignedStartPoint = CPTPointMake( x, MIN(openValue, closeValue) );
                        CGPoint alignedLowPoint = CPTPointMake(x, lowValue);
                        if ( alignPoints ) {
                            alignedStartPoint = alignmentFunction(context,
alignedStartPoint);
                            alignedLowPoint = alignmentFunction(context, alignedLowPoint);
                        }

                        CGPathMoveToPoint(path, NULL, alignedStartPoint.x, alignedStartPoint.y);
                        CGPathAddLineToPoint(path, NULL, alignedLowPoint.x, alignedLowPoint.y);
                    }
                }

                // Upper wick, only if it extends above the body.
                if ( !isnan(highValue) ) {
                    if ( highValue > MAX(openValue, closeValue) ) {
                        CGPoint alignedStartPoint = CPTPointMake( x, MAX(openValue, closeValue) );
                        CGPoint alignedHighPoint = CPTPointMake(x, highValue);
                        if ( alignPoints ) {
                            alignedStartPoint = alignmentFunction(context, alignedStartPoint);
                            alignedHighPoint = alignmentFunction(context, alignedHighPoint);
                        }

                        CGPathMoveToPoint(path, NULL, alignedStartPoint.x, alignedStartPoint.y);
                        CGPathAddLineToPoint(path, NULL, alignedHighPoint.x, alignedHighPoint.y);
                    }
                }

                CGContextBeginPath(context);
                CGContextAddPath(context, path);
                [theBorderLineStyle strokePathInContext:context];
            }

            CGPathRelease(path);
        }
    }
}

// Draws one OHLC glyph: a high-low stick with open (left) and close (right) ticks.
// NaN values suppress the corresponding segments.
-(void)drawOHLCInContext:(nonnull CGContextRef)context
                 atIndex:(NSUInteger)idx
                       x:(CGFloat)x
                    open:(CGFloat)openValue
                   close:(CGFloat)closeValue
                    high:(CGFloat)highValue
                     low:(CGFloat)lowValue
             alignPoints:(BOOL)alignPoints
{
    CPTLineStyle *theLineStyle = [self lineStyleForIndex:idx];

    // Prefer the increase/decrease style when the open→close direction is known.
    if ( !isnan(openValue) && !isnan(closeValue) ) {
        if ( openValue < closeValue ) {
            CPTLineStyle *lineStyleForIncrease = [self increaseLineStyleForIndex:idx];
            if ( [lineStyleForIncrease isKindOfClass:[CPTLineStyle class]] ) {
                theLineStyle = lineStyleForIncrease;
            }
        }
        else if ( openValue > closeValue ) {
            CPTLineStyle *lineStyleForDecrease = [self decreaseLineStyleForIndex:idx];
            if ( [lineStyleForDecrease isKindOfClass:[CPTLineStyle class]] ) {
                theLineStyle = lineStyleForDecrease;
            }
        }
    }

    if ( [theLineStyle isKindOfClass:[CPTLineStyle class]] ) {
        CGFloat theStickLength = self.stickLength;
        CGMutablePathRef path = CGPathCreateMutable();

        CPTAlignPointFunction alignmentFunction = CPTAlignPointToUserSpace;

        // Integral line widths on Retina displays align to device pixels.
        CGFloat lineWidth = theLineStyle.lineWidth;
        if ( ( self.contentsScale > CPTFloat(1.0) ) && (round(lineWidth) == lineWidth) ) {
alignmentFunction = CPTAlignIntegralPointToUserSpace;
        }

        // high-low
        if ( !isnan(highValue) && !isnan(lowValue) ) {
            CGPoint alignedHighPoint = CPTPointMake(x, highValue);
            CGPoint alignedLowPoint = CPTPointMake(x, lowValue);
            if ( alignPoints ) {
                alignedHighPoint = alignmentFunction(context, alignedHighPoint);
                alignedLowPoint = alignmentFunction(context, alignedLowPoint);
            }

            CGPathMoveToPoint(path, NULL, alignedHighPoint.x, alignedHighPoint.y);
            CGPathAddLineToPoint(path, NULL, alignedLowPoint.x, alignedLowPoint.y);
        }

        // open
        if ( !isnan(openValue) ) {
            CGPoint alignedOpenStartPoint = CPTPointMake(x, openValue);
            CGPoint alignedOpenEndPoint = CPTPointMake(x - theStickLength, openValue); // left side
            if ( alignPoints ) {
                alignedOpenStartPoint = alignmentFunction(context, alignedOpenStartPoint);
                alignedOpenEndPoint = alignmentFunction(context, alignedOpenEndPoint);
            }

            CGPathMoveToPoint(path, NULL, alignedOpenStartPoint.x, alignedOpenStartPoint.y);
            CGPathAddLineToPoint(path, NULL, alignedOpenEndPoint.x, alignedOpenEndPoint.y);
        }

        // close
        if ( !isnan(closeValue) ) {
            CGPoint alignedCloseStartPoint = CPTPointMake(x, closeValue);
            CGPoint alignedCloseEndPoint = CPTPointMake(x + theStickLength, closeValue); // right side
            if ( alignPoints ) {
                alignedCloseStartPoint = alignmentFunction(context, alignedCloseStartPoint);
                alignedCloseEndPoint = alignmentFunction(context, alignedCloseEndPoint);
            }

            CGPathMoveToPoint(path, NULL, alignedCloseStartPoint.x, alignedCloseStartPoint.y);
            CGPathAddLineToPoint(path, NULL, alignedCloseEndPoint.x, alignedCloseEndPoint.y);
        }

        CGContextBeginPath(context);
        CGContextAddPath(context, path);
        [theLineStyle setLineStyleInContext:context];
        [theLineStyle strokePathInContext:context];
        CGPathRelease(path);
    }
}

// Draws a miniature glyph of the current plot style into a legend swatch rect.
-(void)drawSwatchForLegend:(nonnull CPTLegend *)legend atIndex:(NSUInteger)idx inRect:(CGRect)rect inContext:(nonnull CGContextRef)context
{
    [super drawSwatchForLegend:legend atIndex:idx inRect:rect inContext:context];

    if ( self.drawLegendSwatchDecoration )
{
        [self.lineStyle setLineStyleInContext:context];

        // Open sits one third up the swatch, close two thirds up.
        switch ( self.plotStyle ) {
            case CPTTradingRangePlotStyleOHLC:
                [self drawOHLCInContext:context
                                atIndex:idx
                                      x:CGRectGetMidX(rect)
                                   open:CGRectGetMinY(rect) + rect.size.height / CPTFloat(3.0)
                                  close:CGRectGetMinY(rect) + rect.size.height * (CGFloat)(2.0 / 3.0)
                                   high:CGRectGetMaxY(rect)
                                    low:CGRectGetMinY(rect)
                            alignPoints:YES];
                break;

            case CPTTradingRangePlotStyleCandleStick:
                [self drawCandleStickInContext:context
                                       atIndex:idx
                                             x:CGRectGetMidX(rect)
                                          open:CGRectGetMinY(rect) + rect.size.height / CPTFloat(3.0)
                                         close:CGRectGetMinY(rect) + rect.size.height * (CGFloat)(2.0 / 3.0)
                                          high:CGRectGetMaxY(rect)
                                           low:CGRectGetMinY(rect)
                                   alignPoints:YES];
                break;
        }
    }
}

// Per-record increase fill: cached data-source value, else the plot-wide increaseFill.
-(nullable CPTFill *)increaseFillForIndex:(NSUInteger)idx
{
    CPTFill *theFill = [self cachedValueForKey:CPTTradingRangePlotBindingIncreaseFills recordIndex:idx];

    if ( (theFill == nil) || (theFill == [CPTPlot nilData]) ) {
        theFill = self.increaseFill;
    }

    return theFill;
}

// Per-record decrease fill: cached data-source value, else the plot-wide decreaseFill.
-(nullable CPTFill *)decreaseFillForIndex:(NSUInteger)idx
{
    CPTFill *theFill = [self cachedValueForKey:CPTTradingRangePlotBindingDecreaseFills recordIndex:idx];

    if ( (theFill == nil) || (theFill == [CPTPlot nilData]) ) {
        theFill = self.decreaseFill;
    }

    return theFill;
}

// Per-record base line style: cached data-source value, else the plot-wide lineStyle.
-(nullable CPTLineStyle *)lineStyleForIndex:(NSUInteger)idx
{
    CPTLineStyle *theLineStyle = [self cachedValueForKey:CPTTradingRangePlotBindingLineStyles recordIndex:idx];

    if ( (theLineStyle == nil) || (theLineStyle == [CPTPlot nilData]) ) {
        theLineStyle = self.lineStyle;
    }

    return theLineStyle;
}

// Per-record increase line style, falling back to the base style when unset.
-(nullable CPTLineStyle *)increaseLineStyleForIndex:(NSUInteger)idx
{
    CPTLineStyle *theLineStyle = [self cachedValueForKey:CPTTradingRangePlotBindingIncreaseLineStyles recordIndex:idx];

    if ( (theLineStyle == nil) || (theLineStyle == [CPTPlot nilData]) ) {
        theLineStyle = self.increaseLineStyle;
    }

    if ( theLineStyle == nil ) {
        theLineStyle = [self lineStyleForIndex:idx];
    }

    return theLineStyle;
}

// Per-record decrease line style, falling back to the base style when unset.
-(nullable CPTLineStyle
*)decreaseLineStyleForIndex:(NSUInteger)idx
{
    CPTLineStyle *theLineStyle = [self cachedValueForKey:CPTTradingRangePlotBindingDecreaseLineStyles recordIndex:idx];

    if ( (theLineStyle == nil) || (theLineStyle == [CPTPlot nilData]) ) {
        theLineStyle = self.decreaseLineStyle;
    }

    if ( theLineStyle == nil ) {
        theLineStyle = [self lineStyleForIndex:idx];
    }

    return theLineStyle;
}

/// @endcond

#pragma mark -
#pragma mark Animation

/// @cond

// Appearance properties trigger a redraw when animated.
+(BOOL)needsDisplayForKey:(nonnull NSString *)aKey
{
    static NSSet<NSString *> *keys = nil;
    static dispatch_once_t onceToken = 0;

    dispatch_once(&onceToken, ^{
        keys = [NSSet setWithArray:@[@"barWidth",
                                     @"stickLength",
                                     @"barCornerRadius",
                                     @"showBarBorder"]];
    });

    if ( [keys containsObject:aKey] ) {
        return YES;
    }
    else {
        return [super needsDisplayForKey:aKey];
    }
}

/// @endcond

#pragma mark -
#pragma mark Fields

/// @cond

// Five fields: x plus open/high/low/close.
-(NSUInteger)numberOfFields
{
    return 5;
}

-(nonnull CPTNumberArray *)fieldIdentifiers
{
    return @[@(CPTTradingRangePlotFieldX),
             @(CPTTradingRangePlotFieldOpen),
             @(CPTTradingRangePlotFieldClose),
             @(CPTTradingRangePlotFieldHigh),
             @(CPTTradingRangePlotFieldLow)];
}

// X maps to the location field; Y maps to all four value fields.
-(nonnull CPTNumberArray *)fieldIdentifiersForCoordinate:(CPTCoordinate)coord
{
    CPTNumberArray *result = nil;

    switch ( coord ) {
        case CPTCoordinateX:
            result = @[@(CPTTradingRangePlotFieldX)];
            break;

        case CPTCoordinateY:
            result = @[@(CPTTradingRangePlotFieldOpen),
                       @(CPTTradingRangePlotFieldLow),
                       @(CPTTradingRangePlotFieldHigh),
                       @(CPTTradingRangePlotFieldClose)];
            break;

        default:
            [NSException raise:CPTException format:@"Invalid coordinate passed to fieldIdentifiersForCoordinate:"];
            break;
    }
    return result;
}

-(CPTCoordinate)coordinateForFieldIdentifier:(NSUInteger)field
{
    CPTCoordinate coordinate = CPTCoordinateNone;

    switch ( field ) {
        case CPTTradingRangePlotFieldX:
            coordinate = CPTCoordinateX;
            break;

        case CPTTradingRangePlotFieldOpen:
        case CPTTradingRangePlotFieldLow:
        case CPTTradingRangePlotFieldHigh:
        case CPTTradingRangePlotFieldClose:
            coordinate = CPTCoordinateY;
            break;
default:
            break;
    }

    return coordinate;
}

/// @endcond

#pragma mark -
#pragma mark Data Labels

/// @cond

// Anchors the data label above (or below, for a reversed y-range) the extreme
// of the four OHLC values at the given record index.
-(void)positionLabelAnnotation:(nonnull CPTPlotSpaceAnnotation *)label forIndex:(NSUInteger)idx
{
    BOOL positiveDirection = YES;
    CPTPlotRange *yRange = [self.plotSpace plotRangeForCoordinate:CPTCoordinateY];

    // A negative-length y-range flips which direction is "up".
    if ( CPTDecimalLessThan( yRange.lengthDecimal, CPTDecimalFromInteger(0) ) ) {
        positiveDirection = !positiveDirection;
    }

    NSNumber *xValue = [self cachedNumberForField:CPTTradingRangePlotFieldX recordIndex:idx];
    NSNumber *yValue;
    CPTNumberArray *yValues = @[[self cachedNumberForField:CPTTradingRangePlotFieldOpen recordIndex:idx],
                                [self cachedNumberForField:CPTTradingRangePlotFieldClose recordIndex:idx],
                                [self cachedNumberForField:CPTTradingRangePlotFieldHigh recordIndex:idx],
                                [self cachedNumberForField:CPTTradingRangePlotFieldLow recordIndex:idx]];
    CPTNumberArray *yValuesSorted = [yValues sortedArrayUsingSelector:@selector(compare:)];

    // Use the max value when labels go up, the min value when they go down.
    if ( positiveDirection ) {
        yValue = yValuesSorted.lastObject;
    }
    else {
        yValue = yValuesSorted[0];
    }

    label.anchorPlotPoint = @[xValue, yValue];

    if ( positiveDirection ) {
        label.displacement = CPTPointMake(0.0, self.labelOffset);
    }
    else {
        label.displacement = CPTPointMake(0.0, -self.labelOffset);
    }

    // Hide labels for hidden plots or NaN data points.
    label.contentLayer.hidden = self.hidden || isnan([xValue doubleValue]) || isnan([yValue doubleValue]);
}

/// @endcond

#pragma mark -
#pragma mark Responder Chain and User Interaction

/// @cond

// Returns the record index whose open/high/low/close view point lies closest to
// the given plot-area point, or NSNotFound if no visible point qualifies.
-(NSUInteger)dataIndexFromInteractionPoint:(CGPoint)point
{
    NSUInteger dataCount = self.cachedDataCount;
    CPTMutableNumericData *locations = [self cachedNumbersForField:CPTTradingRangePlotFieldX];
    CPTMutableNumericData *opens = [self cachedNumbersForField:CPTTradingRangePlotFieldOpen];
    CPTMutableNumericData *highs = [self cachedNumbersForField:CPTTradingRangePlotFieldHigh];
    CPTMutableNumericData *lows = [self cachedNumbersForField:CPTTradingRangePlotFieldLow];
    CPTMutableNumericData *closes = [self cachedNumbersForField:CPTTradingRangePlotFieldClose];
CPTXYPlotSpace *thePlotSpace = (CPTXYPlotSpace *)self.plotSpace;
    CPTPlotRange *xRange = thePlotSpace.xRange;
    CPTPlotRange *yRange = thePlotSpace.yRange;

    CGPoint openPoint, highPoint, lowPoint, closePoint;

    // View-space x and vertical extent of the best candidate found so far.
    CGFloat lastViewX = CPTFloat(0.0);
    CGFloat lastViewMin = CPTFloat(0.0);
    CGFloat lastViewMax = CPTFloat(0.0);

    NSUInteger result = NSNotFound;
    CGFloat minimumDistanceSquared = CPTNAN;

    if ( self.doublePrecisionCache ) {
        const double *locationBytes = (const double *)locations.data.bytes;
        const double *openBytes = (const double *)opens.data.bytes;
        const double *highBytes = (const double *)highs.data.bytes;
        const double *lowBytes = (const double *)lows.data.bytes;
        const double *closeBytes = (const double *)closes.data.bytes;

        for ( NSUInteger i = 0; i < dataCount; i++ ) {
            double plotPoint[2];
            plotPoint[independentCoord] = *locationBytes++;
            // Skip records whose location is NaN or outside the visible x-range.
            if ( isnan(plotPoint[independentCoord]) || ![xRange containsDouble:plotPoint[independentCoord]] ) {
                openBytes++;
                highBytes++;
                lowBytes++;
                closeBytes++;
                continue;
            }

            // open point
            plotPoint[dependentCoord] = *openBytes++;
            if ( !isnan(plotPoint[dependentCoord]) && [yRange containsDouble:plotPoint[dependentCoord]] ) {
                openPoint = [thePlotSpace plotAreaViewPointForDoublePrecisionPlotPoint:plotPoint numberOfCoordinates:2];
                CGFloat distanceSquared = squareOfDistanceBetweenPoints(point, openPoint);
                if ( isnan(minimumDistanceSquared) || (distanceSquared < minimumDistanceSquared) ) {
                    minimumDistanceSquared = distanceSquared;
                    result = i;
                }
            }
            else {
                openPoint = CPTPointMake(NAN, NAN);
            }

            // high point
            plotPoint[dependentCoord] = *highBytes++;
            if ( !isnan(plotPoint[dependentCoord]) && [yRange containsDouble:plotPoint[dependentCoord]] ) {
                highPoint = [thePlotSpace plotAreaViewPointForDoublePrecisionPlotPoint:plotPoint numberOfCoordinates:2];
                CGFloat distanceSquared = squareOfDistanceBetweenPoints(point, highPoint);
                if ( isnan(minimumDistanceSquared) || (distanceSquared < minimumDistanceSquared) ) {
                    minimumDistanceSquared = distanceSquared;
                    result =
i;
                }
            }
            else {
                highPoint = CPTPointMake(NAN, NAN);
            }

            // low point
            plotPoint[dependentCoord] = *lowBytes++;
            if ( !isnan(plotPoint[dependentCoord]) && [yRange containsDouble:plotPoint[dependentCoord]] ) {
                lowPoint = [thePlotSpace plotAreaViewPointForDoublePrecisionPlotPoint:plotPoint numberOfCoordinates:2];
                CGFloat distanceSquared = squareOfDistanceBetweenPoints(point, lowPoint);
                if ( isnan(minimumDistanceSquared) || (distanceSquared < minimumDistanceSquared) ) {
                    minimumDistanceSquared = distanceSquared;
                    result = i;
                }
            }
            else {
                lowPoint = CPTPointMake(NAN, NAN);
            }

            // close point
            plotPoint[dependentCoord] = *closeBytes++;
            if ( !isnan(plotPoint[dependentCoord]) && [yRange containsDouble:plotPoint[dependentCoord]] ) {
                closePoint = [thePlotSpace plotAreaViewPointForDoublePrecisionPlotPoint:plotPoint numberOfCoordinates:2];
                CGFloat distanceSquared = squareOfDistanceBetweenPoints(point, closePoint);
                if ( isnan(minimumDistanceSquared) || (distanceSquared < minimumDistanceSquared) ) {
                    minimumDistanceSquared = distanceSquared;
                    result = i;
                }
            }
            else {
                closePoint = CPTPointMake(NAN, NAN);
            }

            if ( result == i ) {
                // Remember this record's view-space x and vertical extent for the
                // bar-bounds check below.
                // BUG FIX: the original used an else-if chain that re-tested the
                // same isnan(lastViewX) condition, so the low/close fallbacks were
                // unreachable. Successive ifs try each fallback in turn.
                lastViewX = openPoint.x;
                if ( isnan(lastViewX) ) {
                    lastViewX = highPoint.x;
                }
                if ( isnan(lastViewX) ) {
                    lastViewX = lowPoint.x;
                }
                if ( isnan(lastViewX) ) {
                    lastViewX = closePoint.x;
                }

                lastViewMin = MIN( MIN(openPoint.y, closePoint.y), MIN(highPoint.y, lowPoint.y) );
                lastViewMax = MAX( MAX(openPoint.y, closePoint.y), MAX(highPoint.y, lowPoint.y) );
            }
        }
    }
    else {
        // NSDecimal cache path; mirrors the double path above.
        const NSDecimal *locationBytes = (const NSDecimal *)locations.data.bytes;
        const NSDecimal *openBytes = (const NSDecimal *)opens.data.bytes;
        const NSDecimal *highBytes = (const NSDecimal *)highs.data.bytes;
        const NSDecimal *lowBytes = (const NSDecimal *)lows.data.bytes;
        const NSDecimal *closeBytes = (const NSDecimal *)closes.data.bytes;

        for ( NSUInteger i = 0; i < dataCount; i++ ) {
            NSDecimal plotPoint[2];
            plotPoint[dependentCoord] = CPTDecimalNaN();

            plotPoint[independentCoord] = *locationBytes++;
            if (
NSDecimalIsNotANumber(&plotPoint[independentCoord]) || ![xRange contains:plotPoint[independentCoord]] ) {
                openBytes++;
                highBytes++;
                lowBytes++;
                closeBytes++;
                continue;
            }

            // open point
            plotPoint[dependentCoord] = *openBytes++;
            if ( !NSDecimalIsNotANumber(&plotPoint[dependentCoord]) && [yRange contains:plotPoint[dependentCoord]] ) {
                openPoint = [thePlotSpace plotAreaViewPointForPlotPoint:plotPoint numberOfCoordinates:2];
                CGFloat distanceSquared = squareOfDistanceBetweenPoints(point, openPoint);
                if ( isnan(minimumDistanceSquared) || (distanceSquared < minimumDistanceSquared) ) {
                    minimumDistanceSquared = distanceSquared;
                    result = i;
                }
            }
            else {
                openPoint = CPTPointMake(NAN, NAN);
            }

            // high point
            plotPoint[dependentCoord] = *highBytes++;
            if ( !NSDecimalIsNotANumber(&plotPoint[dependentCoord]) && [yRange contains:plotPoint[dependentCoord]] ) {
                highPoint = [thePlotSpace plotAreaViewPointForPlotPoint:plotPoint numberOfCoordinates:2];
                CGFloat distanceSquared = squareOfDistanceBetweenPoints(point, highPoint);
                if ( isnan(minimumDistanceSquared) || (distanceSquared < minimumDistanceSquared) ) {
                    minimumDistanceSquared = distanceSquared;
                    result = i;
                }
            }
            else {
                highPoint = CPTPointMake(NAN, NAN);
            }

            // low point
            plotPoint[dependentCoord] = *lowBytes++;
            if ( !NSDecimalIsNotANumber(&plotPoint[dependentCoord]) && [yRange contains:plotPoint[dependentCoord]] ) {
                lowPoint = [thePlotSpace plotAreaViewPointForPlotPoint:plotPoint numberOfCoordinates:2];
                CGFloat distanceSquared = squareOfDistanceBetweenPoints(point, lowPoint);
                if ( isnan(minimumDistanceSquared) || (distanceSquared < minimumDistanceSquared) ) {
                    minimumDistanceSquared = distanceSquared;
                    result = i;
                }
            }
            else {
                lowPoint = CPTPointMake(NAN, NAN);
            }

            // close point
            plotPoint[dependentCoord] = *closeBytes++;
            if ( !NSDecimalIsNotANumber(&plotPoint[dependentCoord]) && [yRange contains:plotPoint[dependentCoord]] ) {
                closePoint = [thePlotSpace plotAreaViewPointForPlotPoint:plotPoint numberOfCoordinates:2];
                CGFloat distanceSquared =
squareOfDistanceBetweenPoints(point, closePoint);
                if ( isnan(minimumDistanceSquared) || (distanceSquared < minimumDistanceSquared) ) {
                    minimumDistanceSquared = distanceSquared;
                    result = i;
                }
            }
            else {
                closePoint = CPTPointMake(NAN, NAN);
            }

            if ( result == i ) {
                // BUG FIX: same unreachable else-if fallback chain as the double
                // path — replaced with successive ifs.
                lastViewX = openPoint.x;
                if ( isnan(lastViewX) ) {
                    lastViewX = highPoint.x;
                }
                if ( isnan(lastViewX) ) {
                    lastViewX = lowPoint.x;
                }
                if ( isnan(lastViewX) ) {
                    lastViewX = closePoint.x;
                }

                lastViewMin = MIN( MIN(openPoint.y, closePoint.y), MIN(highPoint.y, lowPoint.y) );
                lastViewMax = MAX( MAX(openPoint.y, closePoint.y), MAX(highPoint.y, lowPoint.y) );
            }
        }
    }

    if ( result != NSNotFound ) {
        // Reject hits that fall outside the candidate bar's drawn bounds.
        CGFloat offset = CPTFloat(0.0);

        switch ( self.plotStyle ) {
            case CPTTradingRangePlotStyleOHLC:
                offset = self.stickLength;
                break;

            case CPTTradingRangePlotStyleCandleStick:
                offset = self.barWidth * CPTFloat(0.5);
                break;
        }

        if ( ( point.x < (lastViewX - offset) ) || ( point.x > (lastViewX + offset) ) ) {
            result = NSNotFound;
        }
        if ( (point.y < lastViewMin) || (point.y > lastViewMax) ) {
            result = NSNotFound;
        }
    }

    return result;
}

/// @endcond

/// @name User Interaction
/// @{

/**
 *  @brief Informs the receiver that the user has
 *  @if MacOnly pressed the mouse button. @endif
 *  @if iOSOnly started touching the screen. @endif
 *
 *
 *  If this plot has a delegate that responds to the
 *  @link CPTTradingRangePlotDelegate::tradingRangePlot:barTouchDownAtRecordIndex: -tradingRangePlot:barTouchDownAtRecordIndex: @endlink or
 *  @link CPTTradingRangePlotDelegate::tradingRangePlot:barTouchDownAtRecordIndex:withEvent: -tradingRangePlot:barTouchDownAtRecordIndex:withEvent: @endlink
 *  methods, the @par{interactionPoint} is compared with each bar in index order.
 *  The delegate method will be called and this method returns @YES for the first
 *  index where the @par{interactionPoint} is inside a bar.
 *  This method returns @NO if the @par{interactionPoint} is outside all of the bars.
 *
 *  @param event The OS event.
 *  @param interactionPoint The coordinates of the interaction.
 *  @return Whether the event was handled or not.
 **/
-(BOOL)pointingDeviceDownEvent:(nonnull CPTNativeEvent *)event atPoint:(CGPoint)interactionPoint
{
    CPTGraph *theGraph = self.graph;
    CPTPlotArea *thePlotArea = self.plotArea;

    if ( !theGraph || !thePlotArea || self.hidden ) {
        return NO;
    }

    id<CPTTradingRangePlotDelegate> theDelegate = self.delegate;
    // Only run hit testing if some delegate callback could consume the result.
    if ( [theDelegate respondsToSelector:@selector(tradingRangePlot:barTouchDownAtRecordIndex:)] ||
         [theDelegate respondsToSelector:@selector(tradingRangePlot:barTouchDownAtRecordIndex:withEvent:)] ||
         [theDelegate respondsToSelector:@selector(tradingRangePlot:barWasSelectedAtRecordIndex:)] ||
         [theDelegate respondsToSelector:@selector(tradingRangePlot:barWasSelectedAtRecordIndex:withEvent:)] ) {
        // Inform delegate if a point was hit
        CGPoint plotAreaPoint = [theGraph convertPoint:interactionPoint toLayer:thePlotArea];
        NSUInteger idx = [self dataIndexFromInteractionPoint:plotAreaPoint];
        // Remember the pressed index so the matching up event can detect a selection.
        self.pointingDeviceDownIndex = idx;

        if ( idx != NSNotFound ) {
            BOOL handled = NO;

            if ( [theDelegate respondsToSelector:@selector(tradingRangePlot:barTouchDownAtRecordIndex:)] ) {
                handled = YES;
                [theDelegate tradingRangePlot:self barTouchDownAtRecordIndex:idx];
            }

            if ( [theDelegate respondsToSelector:@selector(tradingRangePlot:barTouchDownAtRecordIndex:withEvent:)] ) {
                handled = YES;
                [theDelegate tradingRangePlot:self barTouchDownAtRecordIndex:idx withEvent:event];
            }

            if ( handled ) {
                return YES;
            }
        }
    }

    return [super pointingDeviceDownEvent:event atPoint:interactionPoint];
}

/**
 *  @brief Informs the receiver that the user has
 *  @if MacOnly released the mouse button. @endif
 *  @if iOSOnly ended touching the screen.
@endif
 *
 *
 *  If this plot has a delegate that responds to the
 *  @link CPTTradingRangePlotDelegate::tradingRangePlot:barTouchUpAtRecordIndex: -tradingRangePlot:barTouchUpAtRecordIndex: @endlink and/or
 *  @link CPTTradingRangePlotDelegate::tradingRangePlot:barTouchUpAtRecordIndex:withEvent: -tradingRangePlot:barTouchUpAtRecordIndex:withEvent: @endlink
 *  methods, the @par{interactionPoint} is compared with each bar in index order.
 *  The delegate method will be called and this method returns @YES for the first
 *  index where the @par{interactionPoint} is inside a bar.
 *  This method returns @NO if the @par{interactionPoint} is outside all of the bars.
 *
 *  If the bar being released is the same as the one that was pressed (see
 *  @link CPTTradingRangePlot::pointingDeviceDownEvent:atPoint: -pointingDeviceDownEvent:atPoint: @endlink), if the delegate responds to the
 *  @link CPTTradingRangePlotDelegate::tradingRangePlot:barWasSelectedAtRecordIndex: -tradingRangePlot:barWasSelectedAtRecordIndex: @endlink and/or
 *  @link CPTTradingRangePlotDelegate::tradingRangePlot:barWasSelectedAtRecordIndex:withEvent: -tradingRangePlot:barWasSelectedAtRecordIndex:withEvent: @endlink
 *  methods, these will be called.
 *
 *  @param event The OS event.
 *  @param interactionPoint The coordinates of the interaction.
 *  @return Whether the event was handled or not.
**/
-(BOOL)pointingDeviceUpEvent:(nonnull CPTNativeEvent *)event atPoint:(CGPoint)interactionPoint
{
    // Consume the stored down index; selection fires only when up matches down.
    NSUInteger selectedDownIndex = self.pointingDeviceDownIndex;

    self.pointingDeviceDownIndex = NSNotFound;

    CPTGraph *theGraph = self.graph;
    CPTPlotArea *thePlotArea = self.plotArea;

    if ( !theGraph || !thePlotArea || self.hidden ) {
        return NO;
    }

    id<CPTTradingRangePlotDelegate> theDelegate = self.delegate;
    if ( [theDelegate respondsToSelector:@selector(tradingRangePlot:barTouchUpAtRecordIndex:)] ||
         [theDelegate respondsToSelector:@selector(tradingRangePlot:barTouchUpAtRecordIndex:withEvent:)] ||
         [theDelegate respondsToSelector:@selector(tradingRangePlot:barWasSelectedAtRecordIndex:)] ||
         [theDelegate respondsToSelector:@selector(tradingRangePlot:barWasSelectedAtRecordIndex:withEvent:)] ) {
        // Inform delegate if a point was hit
        CGPoint plotAreaPoint = [theGraph convertPoint:interactionPoint toLayer:thePlotArea];
        NSUInteger idx = [self dataIndexFromInteractionPoint:plotAreaPoint];
        if ( idx != NSNotFound ) {
            BOOL handled = NO;

            if ( [theDelegate respondsToSelector:@selector(tradingRangePlot:barTouchUpAtRecordIndex:)] ) {
                handled = YES;
                [theDelegate tradingRangePlot:self barTouchUpAtRecordIndex:idx];
            }

            if ( [theDelegate respondsToSelector:@selector(tradingRangePlot:barTouchUpAtRecordIndex:withEvent:)] ) {
                handled = YES;
                [theDelegate tradingRangePlot:self barTouchUpAtRecordIndex:idx withEvent:event];
            }

            if ( idx == selectedDownIndex ) {
                if ( [theDelegate respondsToSelector:@selector(tradingRangePlot:barWasSelectedAtRecordIndex:)] ) {
                    handled = YES;
                    [theDelegate tradingRangePlot:self barWasSelectedAtRecordIndex:idx];
                }

                if ( [theDelegate respondsToSelector:@selector(tradingRangePlot:barWasSelectedAtRecordIndex:withEvent:)] ) {
                    handled = YES;
                    [theDelegate tradingRangePlot:self barWasSelectedAtRecordIndex:idx withEvent:event];
                }
            }

            if ( handled ) {
                return YES;
            }
        }
    }

    return [super pointingDeviceUpEvent:event atPoint:interactionPoint];
}

/// @}

#pragma mark -
#pragma mark Accessors

///
/// @cond

// Appearance setters. Each one redraws the plot when the value actually
// changes; style/fill/width setters additionally post
// CPTLegendNeedsRedrawForPlotNotification so any legend swatch stays in sync.
// Assignments go straight to the ivars (these back the declared properties).

-(void)setPlotStyle:(CPTTradingRangePlotStyle)newPlotStyle
{
    if ( plotStyle != newPlotStyle ) {
        plotStyle = newPlotStyle;
        [self setNeedsDisplay];
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }
}

-(void)setLineStyle:(nullable CPTLineStyle *)newLineStyle
{
    if ( lineStyle != newLineStyle ) {
        // Copy so later mutation of the caller's style cannot affect the plot.
        lineStyle = [newLineStyle copy];
        [self setNeedsDisplay];
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }
}

-(void)setIncreaseLineStyle:(nullable CPTLineStyle *)newLineStyle
{
    if ( increaseLineStyle != newLineStyle ) {
        increaseLineStyle = [newLineStyle copy];
        [self setNeedsDisplay];
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }
}

-(void)setDecreaseLineStyle:(nullable CPTLineStyle *)newLineStyle
{
    if ( decreaseLineStyle != newLineStyle ) {
        decreaseLineStyle = [newLineStyle copy];
        [self setNeedsDisplay];
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }
}

-(void)setIncreaseFill:(nullable CPTFill *)newFill
{
    if ( increaseFill != newFill ) {
        increaseFill = [newFill copy];
        [self setNeedsDisplay];
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }
}

-(void)setDecreaseFill:(nullable CPTFill *)newFill
{
    if ( decreaseFill != newFill ) {
        decreaseFill = [newFill copy];
        [self setNeedsDisplay];
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }
}

-(void)setBarWidth:(CGFloat)newWidth
{
    if ( barWidth != newWidth ) {
        barWidth = newWidth;
        [self setNeedsDisplay];
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }
}

-(void)setStickLength:(CGFloat)newLength
{
    if ( stickLength != newLength ) {
        stickLength = newLength;
        [self setNeedsDisplay];
        [[NSNotificationCenter defaultCenter] postNotificationName:CPTLegendNeedsRedrawForPlotNotification object:self];
    }
}

// Corner radius and border visibility affect only this plot's drawing, not
// the legend swatch, so no notification is posted.
-(void)setBarCornerRadius:(CGFloat)newBarCornerRadius
{
    if ( barCornerRadius != newBarCornerRadius ) {
        barCornerRadius = newBarCornerRadius;
        [self setNeedsDisplay];
    }
}

-(void)setShowBarBorder:(BOOL)newShowBarBorder
{
    if ( showBarBorder != newShowBarBorder ) {
        showBarBorder = newShowBarBorder;
        [self setNeedsDisplay];
    }
}

// Per-field numeric data accessors. These delegate to the plot's cached
// numeric-data store, keyed by the trading-range field enum.

-(void)setXValues:(nullable CPTMutableNumericData *)newValues
{
    [self cacheNumbers:newValues forField:CPTTradingRangePlotFieldX];
}

-(nullable CPTMutableNumericData *)xValues
{
    return [self cachedNumbersForField:CPTTradingRangePlotFieldX];
}

-(nullable CPTMutableNumericData *)openValues
{
    return [self cachedNumbersForField:CPTTradingRangePlotFieldOpen];
}

-(void)setOpenValues:(nullable CPTMutableNumericData *)newValues
{
    [self cacheNumbers:newValues forField:CPTTradingRangePlotFieldOpen];
}

-(nullable CPTMutableNumericData *)highValues
{
    return [self cachedNumbersForField:CPTTradingRangePlotFieldHigh];
}

-(void)setHighValues:(nullable CPTMutableNumericData *)newValues
{
    [self cacheNumbers:newValues forField:CPTTradingRangePlotFieldHigh];
}

-(nullable CPTMutableNumericData *)lowValues
{
    return [self cachedNumbersForField:CPTTradingRangePlotFieldLow];
}

-(void)setLowValues:(nullable CPTMutableNumericData *)newValues
{
    [self cacheNumbers:newValues forField:CPTTradingRangePlotFieldLow];
}

-(nullable CPTMutableNumericData *)closeValues
{
    return [self cachedNumbersForField:CPTTradingRangePlotFieldClose];
}

-(void)setCloseValues:(nullable CPTMutableNumericData *)newValues
{
    [self cacheNumbers:newValues forField:CPTTradingRangePlotFieldClose];
}

// Per-record style/fill array accessors (one entry per bar), backed by the
// plot's cached-array store under the Cocoa-bindings keys. Setters trigger a
// redraw since per-record appearance changed.

-(nullable CPTFillArray *)increaseFills
{
    return [self cachedArrayForKey:CPTTradingRangePlotBindingIncreaseFills];
}

-(void)setIncreaseFills:(nullable CPTFillArray *)newFills
{
    [self cacheArray:newFills forKey:CPTTradingRangePlotBindingIncreaseFills];
    [self setNeedsDisplay];
}

-(nullable CPTFillArray *)decreaseFills
{
    return [self cachedArrayForKey:CPTTradingRangePlotBindingDecreaseFills];
}

-(void)setDecreaseFills:(nullable CPTFillArray *)newFills
{
    [self cacheArray:newFills forKey:CPTTradingRangePlotBindingDecreaseFills];
    [self setNeedsDisplay];
}

-(nullable CPTLineStyleArray *)lineStyles
{
    return [self cachedArrayForKey:CPTTradingRangePlotBindingLineStyles];
}

-(void)setLineStyles:(nullable CPTLineStyleArray *)newLineStyles
{
    [self cacheArray:newLineStyles forKey:CPTTradingRangePlotBindingLineStyles];
    [self setNeedsDisplay];
}

-(nullable CPTLineStyleArray *)increaseLineStyles
{
    return [self cachedArrayForKey:CPTTradingRangePlotBindingIncreaseLineStyles];
}

-(void)setIncreaseLineStyles:(nullable CPTLineStyleArray *)newLineStyles
{
    [self cacheArray:newLineStyles forKey:CPTTradingRangePlotBindingIncreaseLineStyles];
    [self setNeedsDisplay];
}

-(nullable CPTLineStyleArray *)decreaseLineStyles
{
    return [self cachedArrayForKey:CPTTradingRangePlotBindingDecreaseLineStyles];
}

-(void)setDecreaseLineStyles:(nullable CPTLineStyleArray *)newLineStyles
{
    [self cacheArray:newLineStyles forKey:CPTTradingRangePlotBindingDecreaseLineStyles];
    [self setNeedsDisplay];
}

/// @endcond

@end
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.6.0_27) on Thu Jan 23 20:22:10 EST 2014 --> <meta http-equiv="Content-Type" content="text/html" charset="utf-8"> <title>Uses of Class org.apache.solr.response.transform.ScoreAugmenter (Solr 4.6.1 API)</title> <meta name="date" content="2014-01-23"> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.solr.response.transform.ScoreAugmenter (Solr 4.6.1 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../org/apache/solr/response/transform/ScoreAugmenter.html" title="class in org.apache.solr.response.transform">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>PREV</li> <li>NEXT</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?org/apache/solr/response/transform//class-useScoreAugmenter.html" target="_top">FRAMES</a></li> <li><a href="ScoreAugmenter.html" target="_top">NO FRAMES</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a 
href="../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.apache.solr.response.transform.ScoreAugmenter" class="title">Uses of Class<br>org.apache.solr.response.transform.ScoreAugmenter</h2> </div> <div class="classUseContainer">No usage of org.apache.solr.response.transform.ScoreAugmenter</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../org/apache/solr/response/transform/ScoreAugmenter.html" title="class in org.apache.solr.response.transform">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>PREV</li> <li>NEXT</li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?org/apache/solr/response/transform//class-useScoreAugmenter.html" target="_top">FRAMES</a></li> <li><a href="ScoreAugmenter.html" target="_top">NO FRAMES</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = 
document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small> <i>Copyright &copy; 2000-2014 Apache Software Foundation. All Rights Reserved.</i> <script src='../../../../../../prettify.js' type='text/javascript'></script> <script type='text/javascript'> (function(){ var oldonload = window.onload; if (typeof oldonload != 'function') { window.onload = prettyPrint; } else { window.onload = function() { oldonload(); prettyPrint(); } } })(); </script> </small></p> </body> </html>
# Controller concern for rendering publicly visible notices
# (Sys::Notice records scoped by +and_public+).
module Sns::PublicNoticeFilter
  extend ActiveSupport::Concern

  included do
    model Sys::Notice
  end

  # Shows a single public notice; raises "403" when the record
  # does not exist or is not public.
  def show
    @item = @model.and_public.find(params[:id])
    raise "403" unless @item
    render
  end

  # Renders just the notice content without a layout, for frame embedding.
  # Raises "403" when the record does not exist or is not public.
  def frame_content
    @item = @model.and_public.find(params[:id])
    raise "403" unless @item
    render file: "frame_content", layout: false
  end
end
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
package org.apache.activemq.usecases;

import javax.jms.Connection;
import javax.jms.Session;
import javax.management.ObjectName;
import java.io.File;

import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.BrokerFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.leveldb.LevelDBStore;

/**
 * Verifies that a durable topic subscription's "Active" attribute, as exposed
 * through the broker's JMX management views, correctly reflects its state:
 * inactive after a broker restart, active once a subscriber reconnects, and
 * inactive again after the connection is closed.
 */
public class ManagedDurableSubscriptionTest extends org.apache.activemq.TestSupport {

    BrokerService broker = null;
    Connection connection = null;
    ActiveMQTopic topic;

    public void testJMXSubscriptions() throws Exception {
        // create durable subscription
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "SubsId");
        session.close();

        // restart the broker; the persisted subscription must survive but be inactive
        stopBroker();
        startBroker();

        ObjectName inactiveSubscriptionObjectName = broker.getAdminView().getInactiveDurableTopicSubscribers()[0];

        Object inactive = broker.getManagementContext().getAttribute(inactiveSubscriptionObjectName, "Active");
        assertTrue("Subscription is active.", Boolean.FALSE.equals(inactive));

        // activate: re-creating the subscriber with the same name resumes the subscription
        session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "SubsId");

        ObjectName activeSubscriptionObjectName = broker.getAdminView().getDurableTopicSubscribers()[0];

        Object active = broker.getManagementContext().getAttribute(activeSubscriptionObjectName, "Active");
        assertTrue("Subscription is INactive.", Boolean.TRUE.equals(active));

        // deactivate: closing the connection should make it inactive again
        connection.close();
        connection = null;

        inactive = broker.getManagementContext().getAttribute(inactiveSubscriptionObjectName, "Active");
        assertTrue("Subscription is active.", Boolean.FALSE.equals(inactive));
    }

    /**
     * Starts an embedded broker with a per-test LevelDB store and JMX enabled
     * (no external JMX connector), then opens the shared test connection.
     * keepDurableSubsActive=false so closed subscriptions report inactive.
     */
    private void startBroker() throws Exception {
        broker = BrokerFactory.createBroker("broker:(vm://localhost)");
        broker.setKeepDurableSubsActive(false);
        broker.setPersistent(true);
        LevelDBStore persistenceAdapter = new LevelDBStore();
        persistenceAdapter.setDirectory(new File("activemq-data/" + getName()));
        broker.setPersistenceAdapter(persistenceAdapter);
        broker.setUseJmx(true);
        broker.getManagementContext().setCreateConnector(false);
        broker.setBrokerName(getName());
        broker.start();

        connection = createConnection();
    }

    /** Closes the connection (if any) before stopping the broker. */
    private void stopBroker() throws Exception {
        if (connection != null)
            connection.close();
        connection = null;
        if (broker != null)
            broker.stop();
        broker = null;
    }

    @Override
    protected ActiveMQConnectionFactory createConnectionFactory() throws Exception {
        // create=false: attach to the broker started in startBroker() rather
        // than spinning up a new embedded one.
        return new ActiveMQConnectionFactory("vm://" + getName() + "?waitForStart=5000&create=false");
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        topic = (ActiveMQTopic) createDestination();
        startBroker();
    }

    @Override
    protected void tearDown() throws Exception {
        stopBroker();
        super.tearDown();
    }

    @Override
    protected Connection createConnection() throws Exception {
        Connection rc = super.createConnection();
        // A stable client ID is required for durable subscriptions.
        rc.setClientID(getName());
        rc.start();
        return rc;
    }
}
<html>
<head>
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <script type="text/javascript">
        // Copies the text field's value into the document title.
        function onBtClick () {
            var msg = document.getElementById("concole_msg").value;
            document.title = msg;
        }
    </script>
</head>
<!-- Fix: the opening <body> tag was missing (the file closed a body it never opened). -->
<body>
    <!-- NOTE(review): id "concole_msg" looks like a typo for "console_msg",
         but it is used consistently by the script above, so it is kept as-is. -->
    <form name="console_form">
        <input type="text" id="concole_msg" /><br/>
        <input type="button" onclick="onBtClick()" value="Change Title"/>
    </form>
</body>
</html>
package com.navercorp.pinpoint.test.junit4;

import java.lang.reflect.Method;

import com.navercorp.pinpoint.profiler.context.module.DefaultApplicationContext;
import org.junit.internal.runners.model.EachTestNotifier;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.junit.runners.model.TestClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.navercorp.pinpoint.bootstrap.context.SpanRecorder;
import com.navercorp.pinpoint.bootstrap.context.Trace;
import com.navercorp.pinpoint.bootstrap.context.TraceContext;
import com.navercorp.pinpoint.common.trace.ServiceType;

/**
 * JUnit4 runner that executes each test inside a Pinpoint trace context:
 * it lazily builds a shared {@link TestContext}, runs every test method
 * under the test context's class loader, and opens/closes a trace object
 * around each test (unless the method is annotated with {@code @IsRootSpan}).
 *
 * @author hyungil.jeong
 * @author emeroad
 */
public final class PinpointJUnit4ClassRunner extends BlockJUnit4ClassRunner {

    private static final Logger logger = LoggerFactory.getLogger(PinpointJUnit4ClassRunner.class);

    // Shared across all test classes run in this JVM; created on first use.
    private static TestContext testContext;

    public PinpointJUnit4ClassRunner(Class<?> clazz) throws InitializationError {
        super(clazz);
        if (logger.isDebugEnabled()) {
            logger.debug("PinpointJUnit4ClassRunner constructor called with [{}].", clazz);
        }
    }

    // Lazily initializes the shared TestContext for the first test class seen.
    private void beforeTestClass(Class<?> testClass) {
        try {
            // TODO fix static TestContext
            if (testContext == null) {
                logger.debug("traceContext is null");
                TestClassWrapper testClassWrapper = new TestClassWrapper(testClass);
                testContext = new TestContext(testClassWrapper);
            }
        } catch (Throwable ex) {
            throw new RuntimeException(ex.getMessage(), ex);
        }
    }

    // Called by BlockJUnit4ClassRunner's constructor; ensures the TestContext
    // exists before delegating TestClass creation to it.
    protected TestClass createTestClass(Class<?> testClass) {
        logger.debug("createTestClass {}", testClass);
        beforeTestClass(testClass);
        return testContext.createTestClass(testClass);
    }

    private TraceContext getTraceContext() {
        DefaultApplicationContext mockApplicationContext = testContext.getDefaultApplicationContext();
        return mockApplicationContext.getTraceContext();
    }

    @Override
    protected void runChild(FrameworkMethod method, RunNotifier notifier) {
        beginTracing(method);
        // Run the test under the TestContext's class loader so instrumented
        // classes are picked up; always restore the original loader after.
        final Thread thread = Thread.currentThread();
        final ClassLoader originalClassLoader = thread.getContextClassLoader();
        try {
            thread.setContextClassLoader(testContext.getClassLoader());
            super.runChild(method, notifier);
        } finally {
            thread.setContextClassLoader(originalClassLoader);
            endTracing(method, notifier);
        }
    }

    // Opens a new trace recorded with ServiceType.TEST, unless the test
    // method itself creates the root span (@IsRootSpan).
    private void beginTracing(FrameworkMethod method) {
        if (shouldCreateNewTraceObject(method)) {
            TraceContext traceContext = getTraceContext();
            Trace trace = traceContext.newTraceObject();
            SpanRecorder recorder = trace.getSpanRecorder();
            recorder.recordServiceType(ServiceType.TEST);
        }
    }

    // Closes the trace opened by beginTracing and detaches it from the
    // thread; reports a failure if the trace was already detached.
    private void endTracing(FrameworkMethod method, RunNotifier notifier) {
        if (shouldCreateNewTraceObject(method)) {
            TraceContext traceContext = getTraceContext();
            try {
                Trace trace = traceContext.currentRawTraceObject();
                if (trace == null) {
                    // Trace is already detached from the ThreadLocal storage.
                    // Happens when root trace method is tested without @IsRootSpan.
                    EachTestNotifier testMethodNotifier = new EachTestNotifier(notifier, super.describeChild(method));
                    String traceObjectAlreadyDetachedMessage = "Trace object already detached. If you're testing a trace root, please add @IsRootSpan to the test method";
                    testMethodNotifier.addFailure(new IllegalStateException(traceObjectAlreadyDetachedMessage));
                } else {
                    trace.close();
                }
            } finally {
                traceContext.removeTraceObject();
            }
        }
    }

    // The runner manages the trace unless @IsRootSpan(true) is present,
    // in which case the test method is expected to create the root span.
    private boolean shouldCreateNewTraceObject(FrameworkMethod method) {
        IsRootSpan isRootSpan = method.getAnnotation(IsRootSpan.class);
        return isRootSpan == null || !isRootSpan.value();
    }

    @Override
    protected Statement methodInvoker(FrameworkMethod method, Object test) {
        return super.methodInvoker(method, test);
    }

    @Override
    protected Statement withBefores(FrameworkMethod method, final Object target, Statement statement) {
        // Run setupBaseTest(target) before the standard @Before statements.
        Statement before = super.withBefores(method, target, statement);
        BeforeCallbackStatement callbackStatement = new BeforeCallbackStatement(before, new Statement() {
            @Override
            public void evaluate() throws Throwable {
                setupBaseTest(target);
            }
        });
        return callbackStatement;
    }

    // If the test instance extends the configured base test class, invoke its
    // "setup(TestContext)" hook reflectively to hand over the shared context.
    private void setupBaseTest(Object test) {
        logger.debug("setupBaseTest");
        // It's safe to cast
        final Class<?> baseTestClass = testContext.getBaseTestClass();
        if (baseTestClass.isInstance(test)) {
            try {
                Method reset = baseTestClass.getDeclaredMethod("setup", TestContext.class);
                reset.invoke(test, testContext);
            } catch (Exception e) {
                throw new RuntimeException("setCurrentHolder Error. Caused by:" + e.getMessage(), e);
            }
        }
    }

    @Override
    protected Statement withBeforeClasses(Statement statement) {
        // Wrap @BeforeClass so beforeClass() runs first, all under the
        // TestContext's class loader.
        final Statement beforeClasses = super.withBeforeClasses(statement);
        final BeforeCallbackStatement beforeCallbackStatement = new BeforeCallbackStatement(beforeClasses, new Statement() {
            @Override
            public void evaluate() throws Throwable {
                beforeClass();
            }
        });
        return ContextClassLoaderStatement.wrap(beforeCallbackStatement, testContext.getClassLoader());
    }

    public void beforeClass() throws Throwable {
        logger.debug("beforeClass");
        // TODO MockApplicationContext.start();
    }

    @Override
    protected Statement withAfterClasses(Statement statement) {
        // Wrap @AfterClass so afterClass() runs after them, all under the
        // TestContext's class loader.
        final Statement afterClasses = super.withAfterClasses(statement);
        final AfterCallbackStatement afterCallbackStatement = new AfterCallbackStatement(afterClasses, new Statement() {
            @Override
            public void evaluate() throws Throwable {
                afterClass();
            }
        });
        return ContextClassLoaderStatement.wrap(afterCallbackStatement, testContext.getClassLoader());
    }

    public void afterClass() throws Throwable {
        logger.debug("afterClass");
        // TODO MockApplicationContext.close()
    }
}
""" Support for Homematic devices. For more details about this component, please refer to the documentation at https://home-assistant.io/components/homematic/ """ import os import time import logging from datetime import timedelta from functools import partial import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, STATE_UNKNOWN, CONF_USERNAME, CONF_PASSWORD, CONF_PLATFORM, CONF_HOSTS, CONF_NAME, ATTR_ENTITY_ID) from homeassistant.helpers import discovery from homeassistant.helpers.entity import Entity from homeassistant.helpers.event import track_time_interval from homeassistant.config import load_yaml_config_file DOMAIN = 'homematic' REQUIREMENTS = ["pyhomematic==0.1.22"] SCAN_INTERVAL_HUB = timedelta(seconds=300) SCAN_INTERVAL_VARIABLES = timedelta(seconds=30) DISCOVER_SWITCHES = 'homematic.switch' DISCOVER_LIGHTS = 'homematic.light' DISCOVER_SENSORS = 'homematic.sensor' DISCOVER_BINARY_SENSORS = 'homematic.binary_sensor' DISCOVER_COVER = 'homematic.cover' DISCOVER_CLIMATE = 'homematic.climate' ATTR_DISCOVER_DEVICES = 'devices' ATTR_PARAM = 'param' ATTR_CHANNEL = 'channel' ATTR_NAME = 'name' ATTR_ADDRESS = 'address' ATTR_VALUE = 'value' ATTR_PROXY = 'proxy' EVENT_KEYPRESS = 'homematic.keypress' EVENT_IMPULSE = 'homematic.impulse' SERVICE_VIRTUALKEY = 'virtualkey' SERVICE_RECONNECT = 'reconnect' SERVICE_SET_VAR_VALUE = 'set_var_value' SERVICE_SET_DEV_VALUE = 'set_dev_value' HM_DEVICE_TYPES = { DISCOVER_SWITCHES: [ 'Switch', 'SwitchPowermeter', 'IOSwitch', 'IPSwitch', 'IPSwitchPowermeter', 'KeyMatic', 'HMWIOSwitch'], DISCOVER_LIGHTS: ['Dimmer', 'KeyDimmer'], DISCOVER_SENSORS: [ 'SwitchPowermeter', 'Motion', 'MotionV2', 'RemoteMotion', 'MotionIP', 'ThermostatWall', 'AreaThermostat', 'RotaryHandleSensor', 'WaterSensor', 'PowermeterGas', 'LuxSensor', 'WeatherSensor', 'WeatherStation', 'ThermostatWall2', 'TemperatureDiffSensor', 'TemperatureSensor', 'CO2Sensor', 'IPSwitchPowermeter', 
'HMWIOSwitch'], DISCOVER_CLIMATE: [ 'Thermostat', 'ThermostatWall', 'MAXThermostat', 'ThermostatWall2', 'MAXWallThermostat', 'IPThermostat'], DISCOVER_BINARY_SENSORS: [ 'ShutterContact', 'Smoke', 'SmokeV2', 'Motion', 'MotionV2', 'RemoteMotion', 'WeatherSensor', 'TiltSensor', 'IPShutterContact', 'HMWIOSwitch', 'MaxShutterContact'], DISCOVER_COVER: ['Blind', 'KeyBlind'] } HM_IGNORE_DISCOVERY_NODE = [ 'ACTUAL_TEMPERATURE', 'ACTUAL_HUMIDITY' ] HM_ATTRIBUTE_SUPPORT = { 'LOWBAT': ['battery', {0: 'High', 1: 'Low'}], 'ERROR': ['sabotage', {0: 'No', 1: 'Yes'}], 'RSSI_DEVICE': ['rssi', {}], 'VALVE_STATE': ['valve', {}], 'BATTERY_STATE': ['battery', {}], 'CONTROL_MODE': ['mode', {0: 'Auto', 1: 'Manual', 2: 'Away', 3: 'Boost'}], 'POWER': ['power', {}], 'CURRENT': ['current', {}], 'VOLTAGE': ['voltage', {}], 'WORKING': ['working', {0: 'No', 1: 'Yes'}], } HM_PRESS_EVENTS = [ 'PRESS_SHORT', 'PRESS_LONG', 'PRESS_CONT', 'PRESS_LONG_RELEASE', 'PRESS', ] HM_IMPULSE_EVENTS = [ 'SEQUENCE_OK', ] _LOGGER = logging.getLogger(__name__) CONF_RESOLVENAMES_OPTIONS = [ 'metadata', 'json', 'xml', False ] DATA_HOMEMATIC = 'homematic' DATA_DELAY = 'homematic_delay' DATA_DEVINIT = 'homematic_devinit' DATA_STORE = 'homematic_store' CONF_LOCAL_IP = 'local_ip' CONF_LOCAL_PORT = 'local_port' CONF_IP = 'ip' CONF_PORT = 'port' CONF_CALLBACK_IP = "callback_ip" CONF_CALLBACK_PORT = "callback_port" CONF_RESOLVENAMES = 'resolvenames' CONF_VARIABLES = 'variables' CONF_DEVICES = 'devices' CONF_DELAY = 'delay' CONF_PRIMARY = 'primary' DEFAULT_LOCAL_IP = "0.0.0.0" DEFAULT_LOCAL_PORT = 0 DEFAULT_RESOLVENAMES = False DEFAULT_PORT = 2001 DEFAULT_USERNAME = "Admin" DEFAULT_PASSWORD = "" DEFAULT_VARIABLES = False DEFAULT_DEVICES = True DEFAULT_DELAY = 0.5 DEFAULT_PRIMARY = False DEVICE_SCHEMA = vol.Schema({ vol.Required(CONF_PLATFORM): "homematic", vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_ADDRESS): cv.string, vol.Required(ATTR_PROXY): cv.string, vol.Optional(ATTR_CHANNEL, default=1): vol.Coerce(int), 
vol.Optional(ATTR_PARAM): cv.string, }) CONFIG_SCHEMA = vol.Schema({ DOMAIN: vol.Schema({ vol.Required(CONF_HOSTS): {cv.match_all: { vol.Required(CONF_IP): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string, vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string, vol.Optional(CONF_VARIABLES, default=DEFAULT_VARIABLES): cv.boolean, vol.Optional(CONF_RESOLVENAMES, default=DEFAULT_RESOLVENAMES): vol.In(CONF_RESOLVENAMES_OPTIONS), vol.Optional(CONF_DEVICES, default=DEFAULT_DEVICES): cv.boolean, vol.Optional(CONF_PRIMARY, default=DEFAULT_PRIMARY): cv.boolean, vol.Optional(CONF_CALLBACK_IP): cv.string, vol.Optional(CONF_CALLBACK_PORT): cv.port, }}, vol.Optional(CONF_LOCAL_IP, default=DEFAULT_LOCAL_IP): cv.string, vol.Optional(CONF_LOCAL_PORT, default=DEFAULT_LOCAL_PORT): cv.port, vol.Optional(CONF_DELAY, default=DEFAULT_DELAY): vol.Coerce(float), }), }, extra=vol.ALLOW_EXTRA) SCHEMA_SERVICE_VIRTUALKEY = vol.Schema({ vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper), vol.Required(ATTR_CHANNEL): vol.Coerce(int), vol.Required(ATTR_PARAM): cv.string, vol.Optional(ATTR_PROXY): cv.string, }) SCHEMA_SERVICE_SET_VAR_VALUE = vol.Schema({ vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_VALUE): cv.match_all, vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, }) SCHEMA_SERVICE_SET_DEV_VALUE = vol.Schema({ vol.Required(ATTR_ADDRESS): vol.All(cv.string, vol.Upper), vol.Required(ATTR_CHANNEL): vol.Coerce(int), vol.Required(ATTR_PARAM): vol.All(cv.string, vol.Upper), vol.Required(ATTR_VALUE): cv.match_all, vol.Optional(ATTR_PROXY): cv.string, }) SCHEMA_SERVICE_RECONNECT = vol.Schema({}) def virtualkey(hass, address, channel, param, proxy=None): """Send virtual keypress to homematic controlller.""" data = { ATTR_ADDRESS: address, ATTR_CHANNEL: channel, ATTR_PARAM: param, ATTR_PROXY: proxy, } hass.services.call(DOMAIN, SERVICE_VIRTUALKEY, data) def set_var_value(hass, entity_id, value): 
"""Change value of homematic system variable.""" data = { ATTR_ENTITY_ID: entity_id, ATTR_VALUE: value, } hass.services.call(DOMAIN, SERVICE_SET_VAR_VALUE, data) def set_dev_value(hass, address, channel, param, value, proxy=None): """Send virtual keypress to homematic controlller.""" data = { ATTR_ADDRESS: address, ATTR_CHANNEL: channel, ATTR_PARAM: param, ATTR_VALUE: value, ATTR_PROXY: proxy, } hass.services.call(DOMAIN, SERVICE_SET_DEV_VALUE, data) def reconnect(hass): """Reconnect to CCU/Homegear.""" hass.services.call(DOMAIN, SERVICE_RECONNECT, {}) # pylint: disable=unused-argument def setup(hass, config): """Setup the Homematic component.""" from pyhomematic import HMConnection hass.data[DATA_DELAY] = config[DOMAIN].get(CONF_DELAY) hass.data[DATA_DEVINIT] = {} hass.data[DATA_STORE] = [] # create hosts list for pyhomematic remotes = {} hosts = {} for rname, rconfig in config[DOMAIN][CONF_HOSTS].items(): server = rconfig.get(CONF_IP) remotes[rname] = {} remotes[rname][CONF_IP] = server remotes[rname][CONF_PORT] = rconfig.get(CONF_PORT) remotes[rname][CONF_RESOLVENAMES] = rconfig.get(CONF_RESOLVENAMES) remotes[rname][CONF_USERNAME] = rconfig.get(CONF_USERNAME) remotes[rname][CONF_PASSWORD] = rconfig.get(CONF_PASSWORD) remotes[rname]['callbackip'] = rconfig.get(CONF_CALLBACK_IP) remotes[rname]['callbackport'] = rconfig.get(CONF_CALLBACK_PORT) if server not in hosts or rconfig.get(CONF_PRIMARY): hosts[server] = { CONF_VARIABLES: rconfig.get(CONF_VARIABLES), CONF_NAME: rname, } hass.data[DATA_DEVINIT][rname] = rconfig.get(CONF_DEVICES) # Create server thread bound_system_callback = partial(_system_callback_handler, hass, config) hass.data[DATA_HOMEMATIC] = HMConnection( local=config[DOMAIN].get(CONF_LOCAL_IP), localport=config[DOMAIN].get(CONF_LOCAL_PORT), remotes=remotes, systemcallback=bound_system_callback, interface_id="homeassistant" ) # Start server thread, connect to peer, initialize to receive events hass.data[DATA_HOMEMATIC].start() # Stops server when 
Homeassistant is shutting down hass.bus.listen_once( EVENT_HOMEASSISTANT_STOP, hass.data[DATA_HOMEMATIC].stop) # init homematic hubs entity_hubs = [] for _, hub_data in hosts.items(): entity_hubs.append(HMHub( hass, hub_data[CONF_NAME], hub_data[CONF_VARIABLES])) # regeister homematic services descriptions = load_yaml_config_file( os.path.join(os.path.dirname(__file__), 'services.yaml')) def _hm_service_virtualkey(service): """Service handle virtualkey services.""" address = service.data.get(ATTR_ADDRESS) channel = service.data.get(ATTR_CHANNEL) param = service.data.get(ATTR_PARAM) # device not found hmdevice = _device_from_servicecall(hass, service) if hmdevice is None: _LOGGER.error("%s not found for service virtualkey!", address) return # if param exists for this device if param not in hmdevice.ACTIONNODE: _LOGGER.error("%s not datapoint in hm device %s", param, address) return # channel exists? if channel not in hmdevice.ACTIONNODE[param]: _LOGGER.error("%i is not a channel in hm device %s", channel, address) return # call key hmdevice.actionNodeData(param, True, channel) hass.services.register( DOMAIN, SERVICE_VIRTUALKEY, _hm_service_virtualkey, descriptions[DOMAIN][SERVICE_VIRTUALKEY], schema=SCHEMA_SERVICE_VIRTUALKEY) def _service_handle_value(service): """Set value on homematic variable.""" entity_ids = service.data.get(ATTR_ENTITY_ID) name = service.data[ATTR_NAME] value = service.data[ATTR_VALUE] if entity_ids: entities = [entity for entity in entity_hubs if entity.entity_id in entity_ids] else: entities = entity_hubs if not entities: _LOGGER.error("Homematic controller not found!") return for hub in entities: hub.hm_set_variable(name, value) hass.services.register( DOMAIN, SERVICE_SET_VAR_VALUE, _service_handle_value, descriptions[DOMAIN][SERVICE_SET_VAR_VALUE], schema=SCHEMA_SERVICE_SET_VAR_VALUE) def _service_handle_reconnect(service): """Reconnect to all homematic hubs.""" hass.data[DATA_HOMEMATIC].reconnect() hass.services.register( DOMAIN, 
        SERVICE_RECONNECT, _service_handle_reconnect,
        descriptions[DOMAIN][SERVICE_RECONNECT],
        schema=SCHEMA_SERVICE_RECONNECT)

    def _service_handle_device(service):
        """Handle the set_dev_value service: write a value to a datapoint."""
        address = service.data.get(ATTR_ADDRESS)
        channel = service.data.get(ATTR_CHANNEL)
        param = service.data.get(ATTR_PARAM)
        value = service.data.get(ATTR_VALUE)

        # Abort if the addressed device is not known to any proxy
        hmdevice = _device_from_servicecall(hass, service)
        if hmdevice is None:
            _LOGGER.error("%s not found!", address)
            return

        # Write the value to the device datapoint via pyhomematic
        hmdevice.setValue(param, value, channel)

    hass.services.register(
        DOMAIN, SERVICE_SET_DEV_VALUE, _service_handle_device,
        descriptions[DOMAIN][SERVICE_SET_DEV_VALUE],
        schema=SCHEMA_SERVICE_SET_DEV_VALUE)

    return True


def _system_callback_handler(hass, config, src, *args):
    """Handle pyhomematic system callbacks (currently only 'newDevices')."""
    if src == 'newDevices':
        _LOGGER.debug("newDevices with: %s", args)
        # pylint: disable=unused-variable
        (interface_id, dev_descriptions) = args
        # proxy name is the last '-'-separated token of the interface id
        proxy = interface_id.split('-')[-1]

        # Skip proxies for which device initialization is disabled
        if not hass.data[DATA_DEVINIT][proxy]:
            return

        # Collect the base addresses of all devices (ignoring channels,
        # i.e. everything after the ':' in the address)
        key_dict = {}
        for dev in dev_descriptions:
            key_dict[dev['ADDRESS'].split(':')[0]] = True

        # Drop devices already initialized by HA; remember the new ones
        tmp_devs = key_dict.copy()
        for dev in tmp_devs:
            if dev in hass.data[DATA_STORE]:
                del key_dict[dev]
            else:
                hass.data[DATA_STORE].append(dev)

        # Register events: subscribe every new device that exposes an
        # EVENTNODE so its events reach _hm_event_handler
        bound_event_callback = partial(_hm_event_handler, hass, proxy)
        for dev in key_dict:
            hmdevice = hass.data[DATA_HOMEMATIC].devices[proxy].get(dev)

            # have events?
            if len(hmdevice.EVENTNODE) > 0:
                _LOGGER.debug("Register Events from %s", dev)
                hmdevice.setEventCallback(callback=bound_event_callback,
                                          bequeath=True)

        # If configuration allows autodetection of devices,
        # all devices not configured are added.
        if key_dict:
            for component_name, discovery_type in (
                    ('switch', DISCOVER_SWITCHES),
                    ('light', DISCOVER_LIGHTS),
                    ('cover', DISCOVER_COVER),
                    ('binary_sensor', DISCOVER_BINARY_SENSORS),
                    ('sensor', DISCOVER_SENSORS),
                    ('climate', DISCOVER_CLIMATE)):
                # Get all devices of a specific type
                found_devices = _get_devices(
                    hass, discovery_type, key_dict, proxy)

                # When devices of this type are found
                # they are set up in HA and a discovery event is fired
                if found_devices:
                    # Fire discovery event
                    discovery.load_platform(hass, component_name, DOMAIN, {
                        ATTR_DISCOVER_DEVICES: found_devices
                    }, config)


def _get_devices(hass, discovery_type, keys, proxy):
    """Build the discovery device dicts for the given discovery_type."""
    device_arr = []

    for key in keys:
        device = hass.data[DATA_HOMEMATIC].devices[proxy][key]
        class_name = device.__class__.__name__
        metadata = {}

        # Skip device classes not supported by this discovery type
        if class_name not in HM_DEVICE_TYPES[discovery_type]:
            continue

        # Load metadata needed to generate a param list; for non-sensor
        # types a single None param covers the device's elements
        if discovery_type == DISCOVER_SENSORS:
            metadata.update(device.SENSORNODE)
        elif discovery_type == DISCOVER_BINARY_SENSORS:
            metadata.update(device.BINARYNODE)
        else:
            metadata.update({None: device.ELEMENT})

        if metadata:
            # Generate options for 1...n elements with 1...n params
            for param, channels in metadata.items():
                if param in HM_IGNORE_DISCOVERY_NODE:
                    continue

                # Add devices
                _LOGGER.debug("%s: Handling %s: %s: %s",
                              discovery_type, key, param, channels)
                for channel in channels:
                    name = _create_ha_name(
                        name=device.NAME, channel=channel, param=param,
                        count=len(channels)
                    )
                    device_dict = {
                        CONF_PLATFORM: "homematic",
                        ATTR_ADDRESS: key,
                        ATTR_PROXY: proxy,
                        ATTR_NAME: name,
                        ATTR_CHANNEL: channel
                    }
                    if param is not None:
                        device_dict[ATTR_PARAM] = param

                    # Validate before adding; invalid configs are logged
                    # and skipped rather than aborting discovery
                    try:
                        DEVICE_SCHEMA(device_dict)
                        device_arr.append(device_dict)
                    except vol.MultipleInvalid as err:
                        _LOGGER.error("Invalid device config: %s",
                                      str(err))
        else:
            _LOGGER.debug("Got no params for %s", key)

    _LOGGER.debug("%s autodiscovery done: %s",
                  discovery_type, str(device_arr))
    return device_arr


def _create_ha_name(name, channel, param, count):
    """Generate a unique object name from device name, channel and param."""
    # HMDevice is a simple device: one element, no param
    if count == 1 and param is None:
        return name

    # Has multiple elements/channels
    if count > 1 and param is None:
        return "{} {}".format(name, channel)

    # With multiple params on the first element
    if count == 1 and param is not None:
        return "{} {}".format(name, param)

    # Multiple params on an object with multiple elements
    if count > 1 and param is not None:
        return "{} {} {}".format(name, channel, param)


def _hm_event_handler(hass, proxy, device, caller, attribute, value):
    """Handle all pyhomematic device events and forward them to HA."""
    try:
        # device is "ADDRESS:CHANNEL"; split it into its parts
        channel = int(device.split(":")[1])
        address = device.split(":")[0]
        hmdevice = hass.data[DATA_HOMEMATIC].devices[proxy].get(address)
    except (TypeError, ValueError):
        _LOGGER.error("Event handling channel convert error!")
        return

    # Ignore attributes that are not declared as events
    if attribute not in hmdevice.EVENTNODE:
        return

    _LOGGER.debug("Event %s for %s channel %i", attribute,
                  hmdevice.NAME, channel)

    # keypress event
    if attribute in HM_PRESS_EVENTS:
        hass.add_job(hass.bus.async_fire(EVENT_KEYPRESS, {
            ATTR_NAME: hmdevice.NAME,
            ATTR_PARAM: attribute,
            ATTR_CHANNEL: channel
        }))
        return

    # impulse event (no param attached)
    if attribute in HM_IMPULSE_EVENTS:
        hass.add_job(hass.bus.async_fire(EVENT_KEYPRESS, {
            ATTR_NAME: hmdevice.NAME,
            ATTR_CHANNEL: channel
        }))
        return

    _LOGGER.warning("Event is unknown and not forwarded to HA")


def _device_from_servicecall(hass, service):
    """Extract the homematic device referenced by a service call.

    Returns None when the address is unknown (implicitly, by falling
    through the loop without a match).
    """
    address = service.data.get(ATTR_ADDRESS)
    proxy = service.data.get(ATTR_PROXY)

    # With an explicit proxy, look only there
    if proxy:
        return hass.data[DATA_HOMEMATIC].devices[proxy].get(address)

    # Otherwise search every proxy for the address
    for _, devices in hass.data[DATA_HOMEMATIC].devices.items():
        if address in devices:
            return devices[address]


class HMHub(Entity):
    """The Homematic hub. I.e. CCU2/HomeGear."""

    def __init__(self, hass, name, use_variables):
        """Initialize Homematic hub."""
        self.hass = hass
        self.entity_id = "{}.{}".format(DOMAIN, name.lower())
        self._homematic = hass.data[DATA_HOMEMATIC]
        self._variables = {}
        self._name = name
        self._state = STATE_UNKNOWN
        self._use_variables = use_variables

        # Schedule periodic refresh and load initial data synchronously
        track_time_interval(hass, self._update_hub, SCAN_INTERVAL_HUB)
        self._update_hub(None)

        if self._use_variables:
            track_time_interval(
                hass, self._update_variables, SCAN_INTERVAL_VARIABLES)
            self._update_variables(None)

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def should_poll(self):
        """Return false. Homematic Hub object updates variables itself."""
        return False

    @property
    def state(self):
        """Return the state of the entity."""
        return self._state

    @property
    def state_attributes(self):
        """Return the state attributes (a copy of the system variables)."""
        attr = self._variables.copy()
        return attr

    @property
    def icon(self):
        """Return the icon to use in the frontend, if any."""
        return "mdi:gradient"

    def _update_hub(self, now):
        """Retrieve latest state (number of pending service messages)."""
        state = self._homematic.getServiceMessages(self._name)
        self._state = STATE_UNKNOWN if state is None else len(state)
        self.schedule_update_ha_state()

    def _update_variables(self, now):
        """Retrieve all variable data and update hmvariable states."""
        variables = self._homematic.getAllSystemVariables(self._name)
        if variables is None:
            return

        # Only push a state update to HA when something actually changed
        state_change = False
        for key, value in variables.items():
            if key in self._variables and value == self._variables[key]:
                continue

            state_change = True
            self._variables.update({key: value})

        if state_change:
            self.schedule_update_ha_state()

    def hm_set_variable(self, name, value):
        """Set a system variable on the homematic controller."""
        if name not in self._variables:
            _LOGGER.error("Variable %s not found on %s", name, self.name)
            return

        # Coerce the new value to the type of the existing variable
        old_value = self._variables.get(name)
        if isinstance(old_value, bool):
            value = cv.boolean(value)
        else:
            value = float(value)
        self._homematic.setSystemVariable(self.name, name, value)

        self._variables.update({name: value})
        self.schedule_update_ha_state()


class HMDevice(Entity):
    """The Homematic device base object."""

    def __init__(self, hass, config):
        """Initialize a generic Homematic device."""
        self.hass = hass
        self._homematic = hass.data[DATA_HOMEMATIC]
        self._name = config.get(ATTR_NAME)
        self._address = config.get(ATTR_ADDRESS)
        self._proxy = config.get(ATTR_PROXY)
        self._channel = config.get(ATTR_CHANNEL)
        # _state holds the name of the main datapoint (param), not a value
        self._state = config.get(ATTR_PARAM)
        self._data = {}
        self._hmdevice = None
        self._connected = False
        self._available = False

        # Set param to uppercase (pyhomematic node names are uppercase)
        if self._state:
            self._state = self._state.upper()

    @property
    def should_poll(self):
        """Return false. Homematic states are pushed by the XML RPC Server."""
        return False

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def assumed_state(self):
        """Return true if unable to access real state of the device."""
        return not self._available

    @property
    def available(self):
        """Return true if device is available."""
        return self._available

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        attr = {}

        # No data available: nothing to report
        if not self.available:
            return attr

        # Map supported raw nodes present in self._data to HA attributes
        for node, data in HM_ATTRIBUTE_SUPPORT.items():
            # data is (attribute_name, value_translation_dict)
            if node in self._data:
                value = data[1].get(self._data[node], self._data[node])
                attr[data[0]] = value

        # static attributes
        attr['id'] = self._hmdevice.ADDRESS
        attr['proxy'] = self._proxy

        return attr

    def link_homematic(self):
        """Connect to Homematic."""
        # Device is already linked: nothing to do
        if self._connected:
            return True

        # Resolve the pyhomematic device object for this address
        self._hmdevice = self._homematic.devices[self._proxy][self._address]
        self._connected = True

        # Check if Homematic class is okay for HA class
        _LOGGER.info("Start linking %s to %s", self._address, self._name)
        try:
            # Init datapoints of this object
            self._init_data()
            if self.hass.data[DATA_DELAY]:
                # We delay / pause loading of data to avoid overloading
                # of CCU / Homegear when doing auto detection
                time.sleep(self.hass.data[DATA_DELAY])
            self._load_data_from_hm()
            _LOGGER.debug("%s datastruct: %s", self._name, str(self._data))

            # Link events from pyhomematic
            self._subscribe_homematic_events()
            self._available = not self._hmdevice.UNREACH
            _LOGGER.debug("%s linking done", self._name)
        # pylint: disable=broad-except
        except Exception as err:
            # Any failure leaves the device unlinked so a retry is possible
            self._connected = False
            _LOGGER.error("Exception while linking %s: %s",
                          self._address, str(err))

    def _hm_event_callback(self, device, caller, attribute, value):
        """Handle all pyhomematic device events for this entity."""
        _LOGGER.debug("%s received event '%s' value: %s", self._name,
                      attribute, value)
        have_change = False

        # Is data needed for this instance?
        if attribute in self._data:
            # Did data change?
            if self._data[attribute] != value:
                self._data[attribute] = value
                have_change = True

        # UNREACH toggles availability and always counts as a change
        if attribute == 'UNREACH':
            self._available = bool(value)
            have_change = True

        # If a tracked data point changed, update HA
        if have_change:
            _LOGGER.debug("%s update_ha_state after '%s'", self._name,
                          attribute)
            self.schedule_update_ha_state()

    def _subscribe_homematic_events(self):
        """Subscribe to all events required for this entity's datapoints."""
        channels_to_sub = {0: True}  # add channel 0 for UNREACH

        # Collect channels to subscribe from the hmdevice metadata nodes
        for metadata in (self._hmdevice.SENSORNODE, self._hmdevice.BINARYNODE,
                         self._hmdevice.ATTRIBUTENODE,
                         self._hmdevice.WRITENODE, self._hmdevice.EVENTNODE,
                         self._hmdevice.ACTIONNODE):
            for node, channels in metadata.items():
                # Only nodes tracked in self._data are of interest
                if node in self._data:
                    # A single-channel node pins the channel; otherwise
                    # use this entity's configured channel
                    if len(channels) == 1:
                        channel = channels[0]
                    else:
                        channel = self._channel

                    # Prepare for subscription; non-numeric or negative
                    # channels are rejected with an error
                    try:
                        if int(channel) >= 0:
                            channels_to_sub.update({int(channel): True})
                    except (ValueError, TypeError):
                        _LOGGER.error("Invalid channel in metadata from %s",
                                      self._name)

        # Register the event callback for every collected channel
        for channel in channels_to_sub:
            _LOGGER.debug(
                "Subscribe channel %d from %s", channel, self._name)
            self._hmdevice.setEventCallback(
                callback=self._hm_event_callback, bequeath=False,
                channel=channel)

    def _load_data_from_hm(self):
        """Load initial datapoint values from pyhomematic."""
        if not self._connected:
            return False

        # Read every tracked node from the matching pyhomematic getter
        for metadata, funct in (
                (self._hmdevice.ATTRIBUTENODE,
                 self._hmdevice.getAttributeData),
                (self._hmdevice.WRITENODE, self._hmdevice.getWriteData),
                (self._hmdevice.SENSORNODE, self._hmdevice.getSensorData),
                (self._hmdevice.BINARYNODE, self._hmdevice.getBinaryData)):
            for node in metadata:
                if metadata[node] and node in self._data:
                    self._data[node] = funct(name=node, channel=self._channel)

        return True

    def _hm_set_state(self, value):
        """Set the cached value of the main datapoint."""
        if self._state in self._data:
            self._data[self._state] = value

    def _hm_get_state(self):
        """Return the cached value of the main datapoint, or None."""
        if self._state in self._data:
            return self._data[self._state]
        return None

    def _init_data(self):
        """Generate a data dict (self._data) from the Homematic metadata."""
        # Add all attribute nodes with an unknown initial state
        for data_note in self._hmdevice.ATTRIBUTENODE:
            self._data.update({data_note: STATE_UNKNOWN})

        # Initialize device-class specific datapoints
        self._init_data_struct()

    def _init_data_struct(self):
        """Generate a data dict from the Homematic device metadata.

        Must be implemented by each device-class subclass.
        """
        raise NotImplementedError
<?php

namespace Zend\EventManager;

use Traversable;

/**
 * A trait for objects that provide events.
 *
 * Composing classes may declare an $eventIdentifier property (string,
 * array, Traversable, or object) to contribute extra identifiers to the
 * event manager.
 */
trait ProvidesEvents
{
    /**
     * @var EventManagerInterface Lazily created event manager instance
     */
    protected $events;

    /**
     * Set the event manager instance used by this context.
     *
     * Seeds the manager's identifiers with the trait's class, the concrete
     * class, and any identifiers declared via $eventIdentifier.
     *
     * @param  EventManagerInterface $events
     * @return mixed
     */
    public function setEventManager(EventManagerInterface $events)
    {
        $identifiers = array(__CLASS__, get_class($this));

        if (isset($this->eventIdentifier)) {
            $extra      = $this->eventIdentifier;
            $mergeable  = is_string($extra)
                || is_array($extra)
                || ($extra instanceof Traversable);

            if ($mergeable) {
                $identifiers = array_unique(array_merge($identifiers, (array) $extra));
            } elseif (is_object($extra)) {
                $identifiers[] = $extra;
            }
            // any other eventIdentifier type is silently ignored
        }

        $events->setIdentifiers($identifiers);
        $this->events = $events;

        return $this;
    }

    /**
     * Retrieve the event manager.
     *
     * Lazy-loads an EventManager instance if none registered.
     *
     * @return EventManagerInterface
     */
    public function getEventManager()
    {
        if ($this->events instanceof EventManagerInterface) {
            return $this->events;
        }

        $this->setEventManager(new EventManager());

        return $this->events;
    }
}
package uk.ac.warwick.dcs.SemEval.exceptions; public class WordRangeMapException extends Exception { /** * */ private static final long serialVersionUID = -2219029964655714398L; public WordRangeMapException(String message) { super(message); } }
// Catalano Math Library // The Catalano Framework // // Copyright © Diego Catalano, 2015 // diego.catalano at live.com // // Copyright © Andrew Kirillov, 2005-2009 // andrew.kirillov@aforgenet.com // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA // package cn.npt.util.algorithm.transform; /** * Complex Number. * @author Diego Catalano */ public class ComplexNumber { /** * Real. */ public double real = 0; /** * Imaginary. */ public double imaginary = 0; /** * Initializes a new instance of the ComplexNumber class. */ public ComplexNumber() { this(0,0); } /** * Initializes a new instance of the ComplexNumber class. * @param real Real. * @param imaginary Imaginary. */ public ComplexNumber(double real, double imaginary) { this.real = real; this.imaginary = imaginary; } /** * Initializes a new instance of the ComplexNumber class. * @param z1 Complex Number. */ public ComplexNumber(ComplexNumber z1){ this.real = z1.real; this.imaginary = z1.imaginary; } /** * Get Magnitude value of the complex number. * @return Magnitude. */ public double getMagnitude(){ return Math.sqrt(real*real + imaginary*imaginary); } /** * Get Squared magnitude value of the complex number. * @return squared magnitude. 
*/ public double getSquaredMagnitude(){ return real*real + imaginary*imaginary; } /** * Get Phase value of the complex number. * @return Phase value. */ public double getPhase(){ return Math.atan2(imaginary,real); } /** * Get real part from the complex numbers. * @param cn Complex numbers. * @return Real part. */ public static double[] getReal(ComplexNumber[] cn){ double[] n = new double[cn.length]; for (int i = 0; i < n.length; i++) { n[i] = cn[i].real; } return n; } /** * Get imaginary part from the complex numbers. * @param cn Complex numbers. * @return Imaginary part. */ public static double[] getImaginary(ComplexNumber[] cn){ double[] n = new double[cn.length]; for (int i = 0; i < n.length; i++) { n[i] = cn[i].imaginary; } return n; } /** * Get real part from the complex numbers. * @param cn Complex numbers. * @return Real part. */ public static double[][] getReal(ComplexNumber[][] cn){ double[][] n = new double[cn.length][cn[0].length]; for (int i = 0; i < n.length; i++) { for (int j = 0; j < n[0].length; j++) { n[i][j] = cn[i][j].real; } } return n; } /** * Get imaginary part from the complex numbers. * @param cn Complex numbers. * @return Imaginary part. */ public static double[][] getImaginary(ComplexNumber[][] cn){ double[][] n = new double[cn.length][cn[0].length]; for (int i = 0; i < n.length; i++) { for (int j = 0; j < n[0].length; j++) { n[i][j] = cn[i][j].imaginary; } } return n; } /** * Swap values between real and imaginary. * @param z1 Complex number. */ public static void Swap(ComplexNumber z1){ double t = z1.real; z1.real = z1.imaginary; z1.imaginary = t; } /** * Swap values between real and imaginary. * @param z Complex number. */ public static void Swap(ComplexNumber[] z){ for (int i = 0; i < z.length; i++) { z[i] = new ComplexNumber(z[i].imaginary, z[i].real); } } /** * Swap values between real and imaginary. * @param z Complex number. 
*/ public static void Swap(ComplexNumber[][] z){ for (int i = 0; i < z.length; i++) { for (int j = 0; j < z[0].length; j++) { z[i][j] = new ComplexNumber(z[i][j].imaginary, z[i][j].real); } } } /** * Absolute value of complex number. * Same result like magnitude. * @param z Complex Number. * @return Absolute number. */ public static double Abs(ComplexNumber z){ return Magnitude(z); } /** * Absolute value of complex number. * @param z Complex Numbers. * @return Absolute number. */ public static double[] Abs(ComplexNumber[] z){ double[] values = new double[z.length]; for (int i = 0; i < values.length; i++) { values[i] = z[i].getMagnitude(); } return values; } /** * Absolute value of complex number. * @param z Complex numbers. * @return Absolute number. */ public static double[][] Abs(ComplexNumber[][] z){ double[][] values = new double[z.length][z[0].length]; for (int i = 0; i < values.length; i++) { for (int j = 0; j < values[0].length; j++) { values[i][j] = z[i][j].getMagnitude(); } } return values; } /** * Adds two complex numbers. * @param z1 Complex Number. * @param z2 Complex Number. * @return Returns new ComplexNumber instance containing the sum of specified complex numbers. */ public static ComplexNumber Add(ComplexNumber z1, ComplexNumber z2){ return new ComplexNumber(z1.real + z2.real, z1.imaginary + z2.imaginary); } /** * Adds the complex number with a scalar value. * @param z1 Complex Number. * @param scalar Scalar value. * @return Returns new ComplexNumber instance containing the add of specified complex number with scalar value. */ public static ComplexNumber Add(ComplexNumber z1, double scalar){ return new ComplexNumber(z1.real + scalar, z1.imaginary); } /** * Adds scalar value to a complex number. * @param scalar Scalar value. */ public void Add(double scalar){ this.real += scalar; } /** * Subtract two complex numbers. * @param z1 Complex Number. * @param z2 Complex Number. 
* @return Returns new ComplexNumber instance containing the subtract of specified complex numbers. */ public static ComplexNumber Subtract(ComplexNumber z1, ComplexNumber z2){ return new ComplexNumber(z1.real - z2.real, z1.imaginary - z2.imaginary); } /** * Subtract a complex number. * @param z1 Complex Number. * @param scalar Scalar value. * @return Returns new ComplexNumber instance containing the subtract of specified complex number with a scalar value. */ public static ComplexNumber Subtract(ComplexNumber z1, double scalar){ return new ComplexNumber(z1.real - scalar, z1.imaginary); } /** * Subtracts scalar value to a complex number. * @param scalar Scalar value. */ public void Subtract(double scalar){ this.real -= scalar; } /** * Magnitude of complex number. * @param z Complex number. * @return Magnitude of complex number. */ public static double Magnitude(ComplexNumber z){ return Math.sqrt(z.real*z.real + z.imaginary*z.imaginary); } /** * Multiply two complex numbers. * @param z1 Complex Number. * @param z2 Complex Number. * @return Returns new ComplexNumber instance containing the multiply of specified complex numbers. */ public static ComplexNumber Multiply(ComplexNumber z1, ComplexNumber z2){ double z1R = z1.real, z1I = z1.imaginary; double z2R = z2.real, z2I = z2.imaginary; return new ComplexNumber(z1R * z2R - z1I * z2I, z1R * z2I + z1I * z2R); } /** * Multiply scalar value to a complex number. * @param z1 Complex Number. * @param scalar Scalar value. * @return Returns new ComplexNumber instance containing the multiply of specified complex number with the scalar value. */ public static ComplexNumber Multiply(ComplexNumber z1, double scalar){ return new ComplexNumber(z1.real*scalar, z1.imaginary*scalar); } /** * Multiplys scalar value to a complex number. * @param scalar Scalar value. */ public void Multiply(double scalar){ this.real *= scalar; this.imaginary *= scalar; } /** * Divide two complex numbers. * @param z1 Complex Number. 
* @param z2 Complex Number. * @return Returns new ComplexNumber instance containing the divide of specified complex numbers. */ public static ComplexNumber Divide(ComplexNumber z1, ComplexNumber z2){ ComplexNumber conj = ComplexNumber.Conjugate(z2); double a = z1.real * conj.real + ((z1.imaginary * conj.imaginary) * -1); double b = z1.real * conj.imaginary + (z1.imaginary * conj.real); double c = z2.real * conj.real + ((z2.imaginary * conj.imaginary) * -1); return new ComplexNumber(a / c, b / c); } /** * Divides scalar value to a complex number. * @param z1 Complex Number. */ public void Divide(ComplexNumber z1){ ComplexNumber conj = ComplexNumber.Conjugate(z1); double a = this.real * conj.real + ((this.imaginary * conj.imaginary) * -1); double b = this.real * conj.imaginary + (this.imaginary * conj.real); double c = z1.real * conj.real + ((z1.imaginary * conj.imaginary) * -1); this.real = a / c; this.imaginary = b / c; } /** * Divides scalar value to a complex number. * @param z1 Complex Number. * @param scalar Scalar value. * @return Returns new ComplexNumber instance containing the divide of specified complex number with the scalar value. */ public static ComplexNumber Divide(ComplexNumber z1, double scalar){ return new ComplexNumber(z1.real / scalar, z1.imaginary / scalar); } /** * Divides scalar value to a complex number. * @param scalar Scalar value. */ public void Divide(double scalar){ if (scalar == 0){ try { throw new ArithmeticException("Can not divide by zero."); } catch (Exception e) { e.printStackTrace(); } } this.real /= scalar; this.imaginary /= scalar; } /** * Calculate power of a complex number. * @param z1 Complex Number. * @param n Power. * @return Returns a new complex number containing the power of a specified number. 
*/ public static ComplexNumber Pow(ComplexNumber z1, double n){ double norm = Math.pow(z1.getMagnitude(), n); double angle = 360 - Math.abs(Math.toDegrees(Math.atan(z1.imaginary/z1.real))); double common = n * angle; double r = norm * Math.cos(Math.toRadians(common)); double i = norm * Math.sin(Math.toRadians(common)); return new ComplexNumber(r, i); } /** * Calculate power of a complex number. * @param n Power. */ public void Pow(double n){ double norm = Math.pow(getMagnitude(), n); double angle = 360 - Math.abs(Math.toDegrees(Math.atan(this.imaginary/this.real))); double common = n * angle; this.real = norm * Math.cos(Math.toRadians(common)); this.imaginary = norm * Math.sin(Math.toRadians(common)); } /** * Calculates natural (base <b>e</b>) logarithm of a complex number. * @param z1 Complex Number instance. * @return Returns new ComplexNumber instance containing the natural logarithm of the specified complex number. */ public static ComplexNumber Log(ComplexNumber z1){ ComplexNumber result = new ComplexNumber(); if ( ( z1.real > 0.0 ) && ( z1.imaginary == 0.0 ) ){ result.real = Math.log( z1.real ); result.imaginary = 0.0; } else if ( z1.real == 0.0 ){ if ( z1.imaginary > 0.0 ){ result.real = Math.log( z1.imaginary ); result.imaginary = Math.PI / 2.0; } else{ result.real = Math.log( -( z1.imaginary ) ); result.imaginary = -Math.PI / 2.0; } } else{ result.real = Math.log( z1.getMagnitude() ); result.imaginary = Math.atan2( z1.imaginary, z1.real ); } return result; } /** * Calculates exponent (e raised to the specified power) of a complex number. * @param z1 A Complex Number instance. * @return Returns new ComplexNumber instance containing the exponent of the specified complex number. */ public static ComplexNumber Exp(ComplexNumber z1){ ComplexNumber x,y; x = new ComplexNumber(Math.exp(z1.real),0.0); y = new ComplexNumber(Math.cos(z1.imaginary),Math.sin(z1.imaginary)); return Multiply(x, y); } /** * Calculates Sine value of the complex number. 
* @param z1 A Complex Number instance. * @return Returns new ComplexNumber instance containing the Sine value of the specified complex number. */ public static ComplexNumber Sin(ComplexNumber z1){ ComplexNumber result = new ComplexNumber(); if ( z1.imaginary == 0.0 ) { result.real = Math.sin( z1.real ); result.imaginary = 0.0; } else { result.real = Math.sin( z1.real ) * Math.cosh( z1.imaginary ); result.imaginary = Math.cos( z1.real ) * Math.sinh( z1.imaginary ); } return result; } /** * Calculates Cosine value of the complex number. * @param z1 A ComplexNumber instance. * @return Returns new ComplexNumber instance containing the Cosine value of the specified complex number. */ public static ComplexNumber Cos(ComplexNumber z1){ ComplexNumber result = new ComplexNumber(); if ( z1.imaginary == 0.0 ) { result.real = Math.cos( z1.real ); result.imaginary = 0.0; } else { result.real = Math.cos( z1.real ) * Math.cosh( z1.imaginary ); result.imaginary = -Math.sin( z1.real ) * Math.sinh( z1.imaginary ); } return result; } /** * Calculates Tangent value of the complex number. * @param z1 A ComplexNumber instance. * @return Returns new ComplexNumber instance containing the Tangent value of the specified complex number. */ public static ComplexNumber Tan(ComplexNumber z1){ ComplexNumber result = new ComplexNumber(); if ( z1.imaginary == 0.0 ) { result.real = Math.tan( z1.real ); result.imaginary = 0.0; } else { double real2 = 2 * z1.real; double imag2 = 2 * z1.imaginary; double denom = Math.cos( real2 ) + Math.cosh( real2 ); result.real = Math.sin( real2 ) / denom; result.imaginary = Math.sinh( imag2 ) / denom; } return result; } /** * Conjugate this complex number. */ public void Conjugate(){ this.imaginary *= - 1; } /** * Conjugate a complex number. * @param z1 Complex number. * @return Returns new ComplexNumber instance containing the conjugate of the specified complex number. 
*/ public static ComplexNumber Conjugate(ComplexNumber z1){ return new ComplexNumber(z1.real, z1.imaginary * - 1); } @Override public String toString() { if (this.imaginary >= 0) return this.real + " +" + this.imaginary + "i"; return this.real + " " + this.imaginary + "i"; } //public static double complex2Polar(ComplexNumber ) }
""" KubeVirt API This is KubeVirt API an add-on for Kubernetes. OpenAPI spec version: 1.0.0 Contact: kubevirt-dev@googlegroups.com Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class V1VirtualMachineInstanceMigrationState(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'abort_requested': 'bool', 'abort_status': 'str', 'completed': 'bool', 'end_timestamp': 'K8sIoApimachineryPkgApisMetaV1Time', 'failed': 'bool', 'migration_configuration': 'V1MigrationConfiguration', 'migration_policy_name': 'str', 'migration_uid': 'str', 'mode': 'str', 'source_node': 'str', 'start_timestamp': 'K8sIoApimachineryPkgApisMetaV1Time', 'target_attachment_pod_uid': 'str', 'target_cpu_set': 'list[int]', 'target_direct_migration_node_ports': 'dict(str, int)', 'target_node': 'str', 'target_node_address': 'str', 'target_node_domain_detected': 'bool', 'target_node_topology': 'str', 'target_pod': 'str' } attribute_map = { 'abort_requested': 'abortRequested', 'abort_status': 'abortStatus', 'completed': 'completed', 'end_timestamp': 'endTimestamp', 'failed': 'failed', 'migration_configuration': 'migrationConfiguration', 'migration_policy_name': 'migrationPolicyName', 'migration_uid': 'migrationUid', 'mode': 'mode', 'source_node': 'sourceNode', 'start_timestamp': 'startTimestamp', 'target_attachment_pod_uid': 'targetAttachmentPodUID', 'target_cpu_set': 'targetCPUSet', 'target_direct_migration_node_ports': 'targetDirectMigrationNodePorts', 'target_node': 'targetNode', 'target_node_address': 'targetNodeAddress', 'target_node_domain_detected': 'targetNodeDomainDetected', 'target_node_topology': 'targetNodeTopology', 'target_pod': 
'targetPod' } def __init__(self, abort_requested=None, abort_status=None, completed=None, end_timestamp=None, failed=None, migration_configuration=None, migration_policy_name=None, migration_uid=None, mode=None, source_node=None, start_timestamp=None, target_attachment_pod_uid=None, target_cpu_set=None, target_direct_migration_node_ports=None, target_node=None, target_node_address=None, target_node_domain_detected=None, target_node_topology=None, target_pod=None): """ V1VirtualMachineInstanceMigrationState - a model defined in Swagger """ self._abort_requested = None self._abort_status = None self._completed = None self._end_timestamp = None self._failed = None self._migration_configuration = None self._migration_policy_name = None self._migration_uid = None self._mode = None self._source_node = None self._start_timestamp = None self._target_attachment_pod_uid = None self._target_cpu_set = None self._target_direct_migration_node_ports = None self._target_node = None self._target_node_address = None self._target_node_domain_detected = None self._target_node_topology = None self._target_pod = None if abort_requested is not None: self.abort_requested = abort_requested if abort_status is not None: self.abort_status = abort_status if completed is not None: self.completed = completed if end_timestamp is not None: self.end_timestamp = end_timestamp if failed is not None: self.failed = failed if migration_configuration is not None: self.migration_configuration = migration_configuration if migration_policy_name is not None: self.migration_policy_name = migration_policy_name if migration_uid is not None: self.migration_uid = migration_uid if mode is not None: self.mode = mode if source_node is not None: self.source_node = source_node if start_timestamp is not None: self.start_timestamp = start_timestamp if target_attachment_pod_uid is not None: self.target_attachment_pod_uid = target_attachment_pod_uid if target_cpu_set is not None: self.target_cpu_set = target_cpu_set if 
target_direct_migration_node_ports is not None:
            self.target_direct_migration_node_ports = target_direct_migration_node_ports
        if target_node is not None:
            self.target_node = target_node
        if target_node_address is not None:
            self.target_node_address = target_node_address
        if target_node_domain_detected is not None:
            self.target_node_domain_detected = target_node_domain_detected
        if target_node_topology is not None:
            self.target_node_topology = target_node_topology
        if target_pod is not None:
            self.target_pod = target_pod

    # NOTE(review): this is swagger-codegen generated model code; each property
    # below is a plain accessor pair over the matching private attribute.
    # Setters perform no validation — values are stored as-is.

    @property
    def abort_requested(self):
        """bool: Indicates that the migration has been requested to abort."""
        return self._abort_requested

    @abort_requested.setter
    def abort_requested(self, abort_requested):
        """Sets abort_requested (bool)."""
        self._abort_requested = abort_requested

    @property
    def abort_status(self):
        """str: Indicates the final status of the live migration abortion."""
        return self._abort_status

    @abort_status.setter
    def abort_status(self, abort_status):
        """Sets abort_status (str)."""
        self._abort_status = abort_status

    @property
    def completed(self):
        """bool: Indicates the migration completed."""
        return self._completed

    @completed.setter
    def completed(self, completed):
        """Sets completed (bool)."""
        self._completed = completed

    @property
    def end_timestamp(self):
        """K8sIoApimachineryPkgApisMetaV1Time: The time the migration action ended."""
        return self._end_timestamp

    @end_timestamp.setter
    def end_timestamp(self, end_timestamp):
        """Sets end_timestamp (K8sIoApimachineryPkgApisMetaV1Time)."""
        self._end_timestamp = end_timestamp

    @property
    def failed(self):
        """bool: Indicates that the migration failed."""
        return self._failed

    @failed.setter
    def failed(self, failed):
        """Sets failed (bool)."""
        self._failed = failed

    @property
    def migration_configuration(self):
        """V1MigrationConfiguration: Migration configurations to apply."""
        return self._migration_configuration

    @migration_configuration.setter
    def migration_configuration(self, migration_configuration):
        """Sets migration_configuration (V1MigrationConfiguration)."""
        self._migration_configuration = migration_configuration

    @property
    def migration_policy_name(self):
        """str: Name of the migration policy. If string is empty, no policy is matched."""
        return self._migration_policy_name

    @migration_policy_name.setter
    def migration_policy_name(self, migration_policy_name):
        """Sets migration_policy_name (str)."""
        self._migration_policy_name = migration_policy_name

    @property
    def migration_uid(self):
        """str: The VirtualMachineInstanceMigration object associated with this migration."""
        return self._migration_uid

    @migration_uid.setter
    def migration_uid(self, migration_uid):
        """Sets migration_uid (str)."""
        self._migration_uid = migration_uid

    @property
    def mode(self):
        """str: Lets us know if the vmi is currently running pre or post copy migration."""
        return self._mode

    @mode.setter
    def mode(self, mode):
        """Sets mode (str)."""
        self._mode = mode

    @property
    def source_node(self):
        """str: The source node that the VMI originated on."""
        return self._source_node

    @source_node.setter
    def source_node(self, source_node):
        """Sets source_node (str)."""
        self._source_node = source_node

    @property
    def start_timestamp(self):
        """K8sIoApimachineryPkgApisMetaV1Time: The time the migration action began."""
        return self._start_timestamp

    @start_timestamp.setter
    def start_timestamp(self, start_timestamp):
        """Sets start_timestamp (K8sIoApimachineryPkgApisMetaV1Time)."""
        self._start_timestamp = start_timestamp

    @property
    def target_attachment_pod_uid(self):
        """str: The UID of the target attachment pod for hotplug volumes."""
        return self._target_attachment_pod_uid

    @target_attachment_pod_uid.setter
    def target_attachment_pod_uid(self, target_attachment_pod_uid):
        """Sets target_attachment_pod_uid (str)."""
        self._target_attachment_pod_uid = target_attachment_pod_uid

    @property
    def target_cpu_set(self):
        """list[int]: If the VMI requires dedicated CPUs, this field will hold the
        dedicated CPU set on the target node."""
        return self._target_cpu_set

    @target_cpu_set.setter
    def target_cpu_set(self, target_cpu_set):
        """Sets target_cpu_set (list[int])."""
        self._target_cpu_set = target_cpu_set

    @property
    def target_direct_migration_node_ports(self):
        """dict(str, int): The list of ports opened for live migration on the
        destination node."""
        return self._target_direct_migration_node_ports

    @target_direct_migration_node_ports.setter
    def target_direct_migration_node_ports(self, target_direct_migration_node_ports):
        """Sets target_direct_migration_node_ports (dict(str, int))."""
        self._target_direct_migration_node_ports = target_direct_migration_node_ports

    @property
    def target_node(self):
        """str: The target node that the VMI is moving to."""
        return self._target_node

    @target_node.setter
    def target_node(self, target_node):
        """Sets target_node (str)."""
        self._target_node = target_node

    @property
    def target_node_address(self):
        """str: The address of the target node to use for the migration."""
        return self._target_node_address

    @target_node_address.setter
    def target_node_address(self, target_node_address):
        """Sets target_node_address (str)."""
        self._target_node_address = target_node_address

    @property
    def target_node_domain_detected(self):
        """bool: The Target Node has seen the Domain Start Event."""
        return self._target_node_domain_detected

    @target_node_domain_detected.setter
    def target_node_domain_detected(self, target_node_domain_detected):
        """Sets target_node_domain_detected (bool)."""
        self._target_node_domain_detected = target_node_domain_detected

    @property
    def target_node_topology(self):
        """str: If the VMI requires dedicated CPUs, this field will hold the numa
        topology on the target node."""
        return self._target_node_topology

    @target_node_topology.setter
    def target_node_topology(self, target_node_topology):
        """Sets target_node_topology (str)."""
        self._target_node_topology = target_node_topology

    @property
    def target_pod(self):
        """str: The target pod that the VMI is moving to."""
        return self._target_pod

    @target_pod.setter
    def target_pod(self, target_pod):
        """Sets target_pod (str)."""
        self._target_pod = target_pod

    def to_dict(self):
        """Returns the model properties as a dict.

        Recursively serializes nested model values: anything exposing a
        ``to_dict`` method is converted, including elements of lists and
        values of dicts.
        """
        result = {}

        # NOTE(review): ``iteritems`` and ``self.swagger_types`` are assumed to
        # come from the module/class head, which is outside this chunk — confirm
        # (typically ``from six import iteritems``).
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        # NOTE(review): ``pformat`` presumably imported from ``pprint`` at the
        # top of the module (not visible in this chunk).
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal (attribute-wise)."""
        if not isinstance(other, V1VirtualMachineInstanceMigrationState):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal (inverse of __eq__)."""
        return not self == other
package cli

import (
	"github.com/posener/complete"
)

const (
	// RunResultHelp is a value that can be returned from Run to signal
	// to the CLI to render the help output.
	//
	// The value is an arbitrary negative sentinel chosen to be very
	// unlikely to collide with a real exit status returned by a command.
	RunResultHelp = -18511
)

// A command is a runnable sub-command of a CLI.
type Command interface {
	// Help should return long-form help text that includes the command-line
	// usage, a brief few sentences explaining the function of the command,
	// and the complete list of flags the command accepts.
	Help() string

	// Run should run the actual command with the given CLI instance and
	// command-line arguments. It should return the exit status when it is
	// finished.
	//
	// There are a handful of special exit codes this can return documented
	// above that change behavior.
	Run(args []string) int

	// Synopsis should return a one-line, short synopsis of the command.
	// This should be less than 50 characters ideally.
	Synopsis() string
}

// CommandAutocomplete is an extension of Command that enables fine-grained
// autocompletion. Subcommand autocompletion will work even if this interface
// is not implemented. By implementing this interface, more advanced
// autocompletion is enabled.
type CommandAutocomplete interface {
	// AutocompleteArgs returns the argument predictor for this command.
	// If argument completion is not supported, this should return
	// complete.PredictNothing.
	AutocompleteArgs() complete.Predictor

	// AutocompleteFlags returns a mapping of supported flags and autocomplete
	// options for this command. The map key for the Flags map should be the
	// complete flag such as "-foo" or "--foo".
	AutocompleteFlags() complete.Flags
}

// CommandHelpTemplate is an extension of Command that also has a function
// for returning a template for the help rather than the help itself. In
// this scenario, both Help and HelpTemplate should be implemented.
//
// If CommandHelpTemplate isn't implemented, the Help is output as-is.
type CommandHelpTemplate interface {
	// HelpTemplate is the template in text/template format to use for
	// displaying the Help. The keys available are:
	//
	//   * ".Help" - The help text itself
	//   * ".Subcommands"
	//
	HelpTemplate() string
}

// CommandFactory is a type of function that is a factory for commands.
// We need a factory because we may need to setup some state on the
// struct that implements the command itself.
type CommandFactory func() (Command, error)
import unittest

from django.contrib.auth.models import Group, AnonymousUser
from django.core.exceptions import PermissionDenied
from django.test import TestCase, RequestFactory

from rest_framework.exceptions import NotFound

from hs_core.hydroshare import resource
from hs_core.hydroshare import users
from hs_core.testing import MockIRODSTestCaseMixin
from hs_core.views.utils import authorize, ACTION_TO_AUTHORIZE
from hs_access_control.models import PrivilegeCodes


class TestAuthorize(MockIRODSTestCaseMixin, TestCase):
    """Exercises view-level authorization (`authorize`) for every combination of
    requesting user role (owner / editor / viewer / superuser / anonymous /
    unprivileged / inactive) and resource visibility state (private /
    discoverable / public / published-immutable).

    Each test builds a list of parameter dicts — {'res_id', 'needed_permission',
    'success', 'exception'} — and hands them to ``self._run_tests`` (defined
    later in this module, outside this chunk).
    """

    def setUp(self):
        """Creates the 'Hydroshare Author' group, a resource-owner user, and a
        private GenericResource owned by that user."""
        super(TestAuthorize, self).setUp()
        self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
        # create a user - resource owner
        self.user = users.create_account(
            'test_user@email.com',
            username='testuser',
            first_name='some_first_name',
            last_name='some_last_name',
            superuser=False,
            groups=[])

        self.res = resource.create_resource(
            'GenericResource',
            self.user,
            'My Test Resource'
        )

        self.request = RequestFactory().request()

    def test_authorize_owner(self):
        """Owner can do everything on a non-published resource; once the
        resource is published/immutable the owner loses flag-setting and
        delete, but (per these expectations) keeps edit."""
        common_parameters = [
            # resource owner has authorization for resource metadata view
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None},
            # resource owner has authorization for resource view (metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': True, 'exception': None},
            # resource owner has authorization for creating a new resource version
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.CREATE_RESOURCE_VERSION,
             'success': True, 'exception': None}
        ]
        parameters = [
            # resource owner has authorization for resource edit (metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': True, 'exception': None},
            # resource owner has authorization for setting resource flags
            # (public, published/immutable, shareable etc)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': True, 'exception': None},
            # resource owner has authorization for resource delete
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE,
             'success': True, 'exception': None}
        ] + common_parameters

        self.request.user = self.user

        # test for private resource
        self.assertFalse(self.res.raccess.public)
        self.assertFalse(self.res.raccess.discoverable)
        self._run_tests(self.request, parameters)

        # test for discoverable resource
        self.assertFalse(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

        # test for public resource
        self.assertTrue(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = False
        self.res.raccess.public = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

        # test for immutable/published resource
        self.assertFalse(self.res.raccess.immutable)
        self.assertFalse(self.res.raccess.published)
        self.assertTrue(self.res.raccess.public)
        self.res.raccess.published = True
        self.res.raccess.immutable = True
        self.res.raccess.save()
        parameters = [
            # resource owner has authorization for resource edit (metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': True, 'exception': None},
            # resource owner has no authorization for setting resource
            # flags (public, immutable/published, shareable etc)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': False, 'exception': PermissionDenied},
            # resource owner has no authorization for deleting a published resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE,
             'success': False, 'exception': PermissionDenied}
        ] + common_parameters
        self._run_tests(self.request, parameters)

    def test_authorize_editor(self):
        """A user granted CHANGE privilege can view and edit but never set
        flags, delete, or version; edit is also denied once published."""
        # create edit_user
        edit_user = users.create_account(
            'edit_user@email.com',
            username='edituser',
            first_name='edit_first_name',
            last_name='edit_last_name',
            superuser=False,
            groups=[])

        self.request.user = edit_user
        common_parameters = [
            # resource editor has authorization for resource view (metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': True, 'exception': None},
            # resource editor has authorization for resource metadata view
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None},
            # resource editor has no authorization for changing resource flags
            # (e.g., public, published/immutable, shareable etc)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': False, 'exception': PermissionDenied},
            # resource editor has no authorization for deleting a resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # resource editor has no authorization for creating version of a resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.CREATE_RESOURCE_VERSION,
             'success': False, 'exception': PermissionDenied}
        ]
        parameters = [
            # resource editor has authorization for resource edit (metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': True, 'exception': None}
        ] + common_parameters

        # grant edit_user edit permission
        self.user.uaccess.share_resource_with_user(self.res, edit_user,
                                                   PrivilegeCodes.CHANGE)

        # test for private resource
        self.assertFalse(self.res.raccess.public)
        self.assertFalse(self.res.raccess.discoverable)
        self._run_tests(self.request, parameters)

        # test for discoverable resource
        self.assertFalse(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

        # test for public resource
        self.assertTrue(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = False
        self.res.raccess.public = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

        # test for immutable/published resource
        self.assertFalse(self.res.raccess.immutable)
        self.assertFalse(self.res.raccess.published)
        self.assertTrue(self.res.raccess.public)
        self.res.raccess.immutable = True
        self.res.raccess.published = True
        self.res.raccess.save()
        parameters = [
            # resource editor has no authorization for editing a published resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': False, 'exception': PermissionDenied}
        ] + common_parameters
        self._run_tests(self.request, parameters)

    def test_authorize_viewer(self):
        """A user granted VIEW privilege can only view metadata/content;
        expectations are identical across all visibility states."""
        # create view_user
        view_user = users.create_account(
            'view_user@email.com',
            username='viewuser',
            first_name='view_first_name',
            last_name='view_last_name',
            superuser=False,
            groups=[])

        self.request.user = view_user
        parameters = [
            # resource viewer has authorization for resource metadata view
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None},
            # resource viewer has authorization for resource view (metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': True, 'exception': None},
            # resource viewer has no authorization for editing a resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # resource viewer has no authorization for changing resource flags
            # (e.g., public, published/immutable, shareable etc)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': False, 'exception': PermissionDenied},
            # resource viewer has no authorization for deleting a resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # resource viewer has no authorization for creating a resource version
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.CREATE_RESOURCE_VERSION,
             'success': False, 'exception': PermissionDenied}
        ]

        # grant view_user view permission
        self.user.uaccess.share_resource_with_user(self.res, view_user,
                                                   PrivilegeCodes.VIEW)

        # test for private resource
        self.assertFalse(self.res.raccess.public)
        self.assertFalse(self.res.raccess.discoverable)
        self._run_tests(self.request, parameters)

        # test for discoverable resource
        self.assertFalse(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

        # test for public resource
        self.assertTrue(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = False
        self.res.raccess.public = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

        # test for immutable/published resource
        self.assertFalse(self.res.raccess.immutable)
        self.assertFalse(self.res.raccess.published)
        self.assertTrue(self.res.raccess.public)
        self.res.raccess.immutable = True
        self.res.raccess.published = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

    def test_authorize_superuser(self):
        """Superuser can view/edit/delete/set-flags in every visibility state,
        but cannot create a resource version."""
        # create super user
        super_user = users.create_account(
            'super_user@email.com',
            username='superuser',
            first_name='super_first_name',
            last_name='super_last_name',
            superuser=True,
            groups=[])

        self.request.user = super_user
        common_parameters = [
            # super user has authorization for resource metadata view for a resource
            # that is private or discoverable or public or published
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None},
            # super user has authorization for resource view (both metadata and content files)
            # for a resource that is private or discoverable or public or published
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': True, 'exception': None},
            # super user has authorization for editing a resource that is private or
            # discoverable or public or published
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': True, 'exception': None},
            # super user has authorization for deleting a resource that is private or
            # discoverable or public or published
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE,
             'success': True, 'exception': None},
            # super user has no authorization for creating a resource version
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.CREATE_RESOURCE_VERSION,
             'success': False, 'exception': PermissionDenied}
        ]
        parameters = [
            # super user has authorization for setting resource flags (e.g., public,
            # published/immutable, shareable etc) for a private, public or discoverable resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': True, 'exception': None},
        ] + common_parameters

        # test for private resource
        self.assertFalse(self.res.raccess.public)
        self.assertFalse(self.res.raccess.discoverable)
        self._run_tests(self.request, parameters)

        # test for discoverable resource
        self.assertFalse(self.res.raccess.discoverable)
        self.res.raccess.discoverable = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

        # test for public resource
        self.assertTrue(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = False
        self.res.raccess.public = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

        # test for immutable/published resource
        self.assertFalse(self.res.raccess.immutable)
        self.assertFalse(self.res.raccess.published)
        self.assertTrue(self.res.raccess.public)
        self.res.raccess.immutable = True
        self.res.raccess.published = True
        self.res.raccess.save()
        parameters = [
            # super user has authorization for setting resource flags (e.g., public,
            # published/immutable, shareable etc) for a published resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': True, 'exception': None},
        ] + common_parameters
        self._run_tests(self.request, parameters)

    def test_authorize_anonymous_user(self):
        """Anonymous users can never mutate; view access depends on the
        resource visibility (discoverable → metadata only, public/published
        → metadata and content)."""
        self.request.user = AnonymousUser()
        common_parameters = [
            # anonymous user has no authorization for resource edit (metadata and content files)
            # for a private resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # anonymous user has no authorization for deleting a private resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # anonymous user has no authorization for setting resource flags for a private resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': False, 'exception': PermissionDenied},
            # anonymous user has no authorization for creating resource version
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.CREATE_RESOURCE_VERSION,
             'success': False, 'exception': PermissionDenied}
        ]

        # test for private resource
        self.assertFalse(self.res.raccess.public)
        self.assertFalse(self.res.raccess.discoverable)
        self._run_tests(self.request, common_parameters)

        # test for discoverable resource
        self.res.raccess.discoverable = True
        self.res.raccess.public = False
        self.res.raccess.save()
        parameters = [
            # anonymous user has authorization for metadata view for a resource that is discoverable
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None},
            # anonymous user has no authorization for resource view (metadata and content files)
            # for a resource that is discoverable
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': False, 'exception': PermissionDenied}
        ] + common_parameters
        self._run_tests(self.request, parameters)

        # test for public resource
        self.assertTrue(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = False
        self.res.raccess.public = True
        self.res.raccess.save()
        parameters = [
            # anonymous user has authorization for resource view (metadata and content files)
            # for a resource that is public
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': True, 'exception': None},
            # anonymous user has authorization for metadata view for a resource that is public
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None}
        ] + common_parameters
        self._run_tests(self.request, parameters)

        # test for immutable/published resource
        self.assertFalse(self.res.raccess.immutable)
        self.assertFalse(self.res.raccess.published)
        self.assertTrue(self.res.raccess.public)
        self.res.raccess.immutable = True
        self.res.raccess.published = True
        self.res.raccess.save()
        parameters = [
            # anonymous user has authorization for resource metadata view for a
            # published resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None},
            # anonymous user has authorization for resource view (metadata and content files)
            # for a published resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': True, 'exception': None}
        ] + common_parameters
        self._run_tests(self.request, parameters)

    def test_authorize_user(self):
        """An authenticated user with no granted privilege behaves like an
        anonymous user with respect to view access, and can never mutate."""
        # create user - has no assigned resource access privilege
        authenticated_user = users.create_account(
            'user@email.com',
            username='user',
            first_name='user_first_name',
            last_name='user_last_name',
            superuser=False,
            groups=[])

        self.request.user = authenticated_user
        common_parameters = [
            # authenticated user (with no assigned access permission) has no authorization
            # for editing a resource (metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # authenticated user (with no assigned access permission) has no authorization for
            # setting resource flags
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': False, 'exception': PermissionDenied},
            # authenticated user (with no assigned access permission) has no authorization
            # for creating a resource version
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.CREATE_RESOURCE_VERSION,
             'success': False, 'exception': PermissionDenied},
            # authenticated user (with no assigned access permission) has no authorization
            # for deleting a resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE,
             'success': False, 'exception': PermissionDenied}
        ]

        # test for private resource
        self.assertFalse(self.res.raccess.public)
        self.assertFalse(self.res.raccess.discoverable)
        parameters = [
            # authenticated user (with no assigned access permission) has no authorization
            # for viewing metadata of a private resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': False, 'exception': PermissionDenied},
            # authenticated user (with no assigned access permission) has no authorization
            # for viewing a private resource (both metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': False, 'exception': PermissionDenied}
        ] + common_parameters
        self._run_tests(self.request, parameters)

        # test for discoverable resource
        self.res.raccess.discoverable = True
        self.res.raccess.public = False
        self.res.raccess.save()
        parameters = [
            # authenticated user (with no assigned access permission) has no authorization
            # for resource view (metadata and content files) for a discoverable resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # authenticated (with no assigned access permission) user has authorization for
            # resource metadata only view for a discoverable resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None}
        ] + common_parameters
        self._run_tests(self.request, parameters)

        # test for public resource
        self.assertTrue(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = False
        self.res.raccess.public = True
        self.res.raccess.save()
        parameters = [
            # authenticated user (with no assigned access permission) has authorization for
            # resource view (metadata and content files) for a public or published resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': True, 'exception': None},
            # authenticated user (with no assigned access permission) has authorization for
            # resource metadata only view for a public or published resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': True, 'exception': None}
        ] + common_parameters
        self._run_tests(self.request, parameters)

        # test for immutable/published resource
        self.assertFalse(self.res.raccess.immutable)
        self.assertFalse(self.res.raccess.published)
        self.assertTrue(self.res.raccess.public)
        self.res.raccess.immutable = True
        self.res.raccess.published = True
        self.res.raccess.save()
        self._run_tests(self.request, parameters)

    def test_authorize_inactive_user(self):
        """An owner whose account has been deactivated loses every permission,
        including view, regardless of resource visibility."""
        self.user.is_active = False
        self.user.save()
        self.request.user = self.user
        common_parameters = [
            # resource inactive owner has no authorization for editing a resource
            # (metadata and content files)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # resource inactive owner has no authorization for setting resource flags
            # (public, published/immutable, shareable etc)
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG,
             'success': False, 'exception': PermissionDenied},
            # resource inactive owner has no authorization for creating a resource version
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.CREATE_RESOURCE_VERSION,
             'success': False, 'exception': PermissionDenied},
            # resource inactive owner has no authorization for deleting a resource
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE,
             'success': False, 'exception': PermissionDenied}
        ]

        # test for private resource
        self.assertFalse(self.res.raccess.public)
        self.assertFalse(self.res.raccess.discoverable)
        parameters = [
            # resource inactive owner has no authorization for resource view
            # (metadata and content files) for a resource that is not discoverable
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE,
             'success': False, 'exception': PermissionDenied},
            # resource inactive owner has no authorization for resource metadata view
            # for a resource that is not discoverable
            {'res_id': self.res.short_id,
             'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA,
             'success': False, 'exception': PermissionDenied}
        ] + common_parameters
        self._run_tests(self.request, parameters)

        # test for discoverable resource
        self.assertFalse(self.res.raccess.discoverable)
        self.assertFalse(self.res.raccess.public)
        self.res.raccess.discoverable = True
        self.res.raccess.save()
        parameters = [
            # resource inactive owner has no authorization for resource view
            # (metadata and content files) for a resource that is discoverable
            {'res_id': self.res.short_id, 'needed_permission':
ACTION_TO_AUTHORIZE.VIEW_RESOURCE, 'success': False, 'exception': PermissionDenied}, # resource inactive owner has authorization for resource metadata view for a resource that is # discoverable {'res_id': self.res.short_id, 'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA, 'success': True, 'exception': None} ] + common_parameters self._run_tests(self.request, parameters) # test for public resource self.assertTrue(self.res.raccess.discoverable) self.assertFalse(self.res.raccess.public) self.res.raccess.discoverable = False self.res.raccess.public = True self.res.raccess.save() parameters = [ # resource inactive owner has authorization for resource view (metadata and content files) for a # public resource {'res_id': self.res.short_id, 'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE, 'success': True, 'exception': None}, # resource inactive owner has authorization for resource metadata view for a public resource {'res_id': self.res.short_id, 'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA, 'success': True, 'exception': None} ] + common_parameters self._run_tests(self.request, parameters) # test for immutable/published resource self.assertFalse(self.res.raccess.immutable) self.assertFalse(self.res.raccess.published) self.assertTrue(self.res.raccess.public) self.res.raccess.published = True self.res.raccess.immutable = True self.res.raccess.save() self._run_tests(self.request, parameters) def test_exception_notfound(self): invalid_res_id = '123x' parameters = [ {'res_id': invalid_res_id, 'needed_permission': ACTION_TO_AUTHORIZE.VIEW_METADATA, 'success': False, 'exception': NotFound}, {'res_id': invalid_res_id, 'needed_permission': ACTION_TO_AUTHORIZE.VIEW_RESOURCE, 'success': False, 'exception': NotFound}, {'res_id': invalid_res_id, 'needed_permission': ACTION_TO_AUTHORIZE.EDIT_RESOURCE, 'success': False, 'exception': NotFound}, {'res_id': invalid_res_id, 'needed_permission': ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG, 'success': False, 'exception': 
NotFound}, {'res_id': invalid_res_id, 'needed_permission': ACTION_TO_AUTHORIZE.CREATE_RESOURCE_VERSION, 'success': False, 'exception': NotFound}, {'res_id': invalid_res_id, 'needed_permission': ACTION_TO_AUTHORIZE.DELETE_RESOURCE, 'success': False, 'exception': NotFound} ] self.request.user = self.user self._run_tests(self.request, parameters) def test_default_parameters(self): # default permission is view # >> test private resource self.assertFalse(self.res.raccess.public) # test private link sharing is not enabled self.assertFalse(self.res.raccess.allow_private_sharing) # test owner self.request.user = self.user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test super user super_user = users.create_account( 'super_user@email.com', username='superuser', first_name='super_first_name', last_name='super_last_name', superuser=True, groups=[]) self.request.user = super_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test edit user edit_user = users.create_account( 'edit_user@email.com', username='edituser', first_name='edit_first_name', last_name='edit_last_name', superuser=False, groups=[]) self.request.user = edit_user # grant edit_user edit permission self.user.uaccess.share_resource_with_user(self.res, edit_user, PrivilegeCodes.CHANGE) _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test view user view_user = users.create_account( 'view_user@email.com', username='viewuser', first_name='view_first_name', last_name='view_last_name', superuser=False, groups=[]) self.request.user = view_user # grant view_user view permission self.user.uaccess.share_resource_with_user(self.res, view_user, PrivilegeCodes.VIEW) _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test authenticated user with no granted permission authenticated_user = users.create_account( 
'user@email.com', username='user', first_name='user_first_name', last_name='user_last_name', superuser=False, groups=[]) self.request.user = authenticated_user with self.assertRaises(PermissionDenied): authorize(self.request, res_id=self.res.short_id) # test anonymous user self.request.user = AnonymousUser() with self.assertRaises(PermissionDenied): authorize(self.request, res_id=self.res.short_id) # >> test for discoverable resource self.assertFalse(self.res.raccess.discoverable) self.assertFalse(self.res.raccess.allow_private_sharing) self.res.raccess.discoverable = True self.res.raccess.public = False self.res.raccess.save() # test owner self.request.user = self.user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test super user self.request.user = super_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test edit user self.request.user = edit_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test view user self.request.user = view_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test authenticated user self.request.user = authenticated_user with self.assertRaises(PermissionDenied): authorize(self.request, res_id=self.res.short_id) # test anonymous user self.request.user = AnonymousUser() with self.assertRaises(PermissionDenied): authorize(self.request, res_id=self.res.short_id) # >> test for public resource self.assertFalse(self.res.raccess.public) self.res.raccess.discoverable = False self.res.raccess.public = True self.res.raccess.save() # test owner self.request.user = self.user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test super user self.request.user = super_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test edit user 
self.request.user = edit_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test view user self.request.user = view_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test authenticated user self.request.user = authenticated_user authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test anonymous user self.request.user = AnonymousUser() authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # >> test for published resource self.assertFalse(self.res.raccess.published) self.res.raccess.published = False self.res.raccess.immutable = True self.res.raccess.save() # test owner self.request.user = self.user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test super user self.request.user = super_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test edit user self.request.user = edit_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test view user self.request.user = view_user _, authorized, _ = authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test authenticated user self.request.user = authenticated_user authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) # test anonymous user self.request.user = AnonymousUser() authorize(self.request, res_id=self.res.short_id) self.assertTrue(authorized) def test_raise_no_exception(self): # create user - has no assigned resource access privilege authenticated_user = users.create_account( 'user@email.com', username='user', first_name='user_first_name', last_name='user_last_name', superuser=False, groups=[]) self.request.user = authenticated_user res, authorized, user = authorize(self.request, res_id=self.res.short_id, needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE, 
raises_exception=False) self.assertEqual(authorized, False) self.assertEqual(res, self.res) self.assertEqual(user, authenticated_user) def test_authorization_for_private_link_sharing(self): """Test that anonymous user is authorized to view resource when private share link enabled""" self.request.user = AnonymousUser() # check private link sharing is not enabled self.assertFalse(self.res.raccess.allow_private_sharing) _, authorized, _ = authorize(self.request, res_id=self.res.short_id, needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE, raises_exception=False) # test anonymous user is not authorized to vew resource self.assertFalse(authorized) # enable private link sharing self.res.raccess.allow_private_sharing = True self.res.raccess.save() _, authorized, _ = authorize(self.request, res_id=self.res.short_id, needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE, raises_exception=False) # test anonymous user is authorized to vew resource self.assertTrue(authorized) def test_return_data(self): # test authorization True self.request.user = self.user res, authorized, user = authorize(self.request, res_id=self.res.short_id, needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE) self.assertEqual(authorized, True) self.assertEqual(res, self.res) self.assertEqual(user, self.user) # test authorization False anonymous_user = AnonymousUser() self.request.user = anonymous_user res, authorized, user = authorize(self.request, res_id=self.res.short_id, needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE, raises_exception=False) self.assertEqual(authorized, False) self.assertEqual(res, self.res) self.assertEqual(user, anonymous_user) def _run_tests(self, request, parameters): for params in parameters: if params['exception'] is None: res, authorized, user = authorize(request, res_id=params['res_id'], needed_permission=params['needed_permission']) self.assertEqual(params['success'], authorized) else: with self.assertRaises(params['exception']): authorize(request, 
res_id=params['res_id'], needed_permission=params['needed_permission'])
<!-- JS Includes --> <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.min.js"></script> <script src="<?php echo JS_URL;?>jquery.tools.tooltips.min.js?v=1.0"></script> <script src="<?php echo JS_URL;?>global.js?v=1.0"></script>
// View for the Growl "music video" display style: draws an icon plus
// title/body text over a colored background.  Drawing appears to be cached
// in a CGLayer/NSImage pair (see ivars) — confirm against the .m file.
@interface GrowlMusicVideoWindowView : GrowlNotificationView {
	NSImage *icon;                 // notification icon
	NSString *title;               // notification title string
	NSString *text;                // notification body text
	NSDictionary *textAttributes;  // attributed-string attributes for the body
	NSDictionary *titleAttributes; // attributed-string attributes for the title
	NSColor *textColor;            // body text color
	NSColor *backgroundColor;      // view background color
	CGLayerRef layer;              // cached drawing layer
	NSImage *cache;                // cached rendered image
	BOOL needsDisplay;             // whether the cache must be rebuilt
}
// Content setters; presumably invalidate the cached rendering — verify in impl.
- (void) setIcon:(NSImage *)icon;
- (void) setTitle:(NSString *)title;
- (void) setText:(NSString *)text;
- (void) setPriority:(int)priority;
// Standard target/action pair for click-through on the notification.
- (id) target;
- (void) setTarget:(id)object;
- (SEL) action;
- (void) setAction:(SEL)selector;
@end
// Package phases contains kubeadm init workflow phases; this file implements
// the upload-certs phase, which stores control-plane certificates in the
// kubeadm-certs Secret so additional control-plane nodes can download them.
package phases

import (
	"fmt"

	"github.com/pkg/errors"

	"k8s.io/kubernetes/cmd/kubeadm/app/cmd/options"
	"k8s.io/kubernetes/cmd/kubeadm/app/cmd/phases/workflow"
	cmdutil "k8s.io/kubernetes/cmd/kubeadm/app/cmd/util"
	kubeadmconstants "k8s.io/kubernetes/cmd/kubeadm/app/constants"
	"k8s.io/kubernetes/cmd/kubeadm/app/phases/copycerts"
)

// NewUploadCertsPhase returns the uploadCerts phase
func NewUploadCertsPhase() workflow.Phase {
	return workflow.Phase{
		Name:  "upload-certs",
		Short: fmt.Sprintf("Upload certificates to %s", kubeadmconstants.KubeadmCertsSecret),
		Long:  cmdutil.MacroCommandLongDescription,
		Run:   runUploadCerts,
		// Flags inherited from the parent command that this phase honors.
		InheritFlags: []string{
			options.CfgPath,
			options.KubeconfigPath,
			options.UploadCerts,
			options.CertificateKey,
			options.SkipCertificateKeyPrint,
		},
	}
}

// runUploadCerts encrypts and uploads control-plane certificates to the
// cluster, generating a fresh certificate key when none was supplied.
// It is a no-op (with a notice) unless --upload-certs was requested.
func runUploadCerts(c workflow.RunData) error {
	data, ok := c.(InitData)
	if !ok {
		return errors.New("upload-certs phase invoked with an invalid data struct")
	}

	if !data.UploadCerts() {
		fmt.Printf("[upload-certs] Skipping phase. Please see --%s\n", options.UploadCerts)
		return nil
	}
	client, err := data.Client()
	if err != nil {
		return err
	}

	// Generate a certificate key if the user didn't provide one.
	if len(data.CertificateKey()) == 0 {
		certificateKey, err := copycerts.CreateCertificateKey()
		if err != nil {
			return err
		}
		data.SetCertificateKey(certificateKey)
	}

	if err := copycerts.UploadCerts(client, data.Cfg(), data.CertificateKey()); err != nil {
		return errors.Wrap(err, "error uploading certs")
	}
	// Print the key so the user can pass it to `kubeadm join`, unless
	// suppressed via --skip-certificate-key-print.
	if !data.SkipCertificateKeyPrint() {
		fmt.Printf("[upload-certs] Using certificate key:\n%s\n", data.CertificateKey())
	}
	return nil
}
<?php /** * Message translations. * * This file is automatically generated by 'yii message/extract' command. * It contains the localizable messages extracted from source code. * You may modify this file by translating the extracted messages. * * Each array element represents the translation (value) of a message (key). * If the value is empty, the message is considered as not translated. * Messages that no longer need translation will have their translations * enclosed between a pair of '@@' marks. * * Message string can be used with plural forms format. Check i18n section * of the guide for details. * * NOTE: this file must be saved in UTF-8 encoding. */ return [ 'Actions' => 'Handlinger', 'Active' => 'Aktiv', 'Add Book' => 'Tilføj Bog', 'All' => '', 'Are you sure to delete this item?' => 'Er du sikker på at du vil slette dette element?', 'Book Listing' => 'Bogoversigt', 'CSV' => 'CSV', 'Clear selection' => '', 'Collapse All' => '', 'Collapse' => '', 'Comma Separated Values' => '', 'Delete' => 'Slet', 'Disable any popup blockers in your browser to ensure proper download.' => 'Deaktivér eventuelle popup blockers i din browser for at sikre korrekt download.', 'Download Selected' => 'Download Valgte', 'Excel' => 'Excel', 'Expand All' => '', 'Expand' => '', 'Export All Data' => '', 'Export Page Data' => 'Export Side Data', 'Export' => 'Eksportér', 'ExportWorksheet' => 'EksportérArbejdsark', 'Generated' => '', 'Generating the export file. Please wait...' => '', 'Grid Export' => '', 'HTML' => 'HTML', 'Hyper Text Markup Language' => '', 'Inactive' => 'Inaktiv', 'Invalid editable index or model form name' => '', 'Invalid or bad editable data' => '', 'JSON' => '', 'JavaScript Object Notation' => '', 'Library' => 'Bibliotek', 'Microsoft Excel 95+' => '', 'No data found' => 'Ingen data fundet', 'No valid editable model found' => '', 'Ok to proceed?' 
=> '', 'PDF export generated by kartik-v/yii2-grid extension' => '', 'PDF' => '', 'Page' => '', 'Portable Document Format' => '', 'Request submitted! You may safely close this dialog after saving your downloaded file.' => '', 'Reset Grid' => 'Nulstil Tabel', 'Resize table columns just like a spreadsheet by dragging the column edges.' => 'Resize tabelkolonner ligesom et regneark ved at trække kolonnen kanter.', 'Show all data' => '', 'Show first page data' => '', 'Tab Delimited Text' => '', 'Text' => 'Tekst', 'The CSV export file will be generated for download.' => 'CSV eksportfilen vil blive genereret til download.', 'The EXCEL export file will be generated for download.' => 'EXCEL eksportfilen vil blive genereret til download.', 'The HTML export file will be generated for download.' => 'HTML eksportfilen vil blive genereret til download.', 'The JSON export file will be generated for download.' => '', 'The PDF export file will be generated for download.' => '', 'The TEXT export file will be generated for download.' => 'TEKST eksportfilen vil blive genereret til download.', 'The page summary displays SUM for first 3 amount columns and AVG for the last.' => 'Sideresuméet viser SUM for de første 3 kolonner og AVG for den sidste.', 'The table header sticks to the top in this demo as you scroll' => 'Tabeloverskriften fastgøres til toppen i denne demo, når du scroller', 'There are {totalCount} records. Are you sure you want to display them all?' => '', 'Update' => 'Opdatér', 'View' => 'Vis', 'Yii2 Grid Export (PDF)' => '', 'export' => 'exportér', 'grid-export' => 'tabel-eksport', 'krajee, grid, export, yii2-grid, pdf' => '', '© Krajee Yii2 Extensions' => '', ];
<html>
<head>
	<title>Chrome History API Data Artifact</title>
</head>
<body>
	<!-- Minimal repro page for a Chrome 8-10 HTML5 History API bug:
	     after pushState + history.back(), popstate reports the pushed
	     state's data instead of null for the initial entry. -->
	<p>This demo demonstrates an issue with Google Chrome versions 8-10 (possibly 11) where if you push a state with data, then do history.back to the initial state, the event.state will contain the pushed states data instead of being null.</p>
	<p>Note: The issue requires a clean history list, as such this should always be opened in a new tab/window where there are no prior history items.</p>
	<p>Reported by <a href="http://balupton.com">Benjamin Lupton</a> author of <a href="http://github.com/balupton/history.js">History.js</a></p>
	<button id="bug">bug</button>
	<button id="reset">reset</button>
	<!-- Log output area: each popstate event is appended here. -->
	<textarea id="log" style="width:100%;height:200px;margin-top:1em;"></textarea>
	<script type="text/javascript">
	(function(){
		// Record every popstate with the current URL and the event's state data.
		window.onpopstate = function(event) {
			var message = ("onpopstate: location: " + document.location.href + ", data: " + JSON.stringify(event.state));
			document.getElementById('log').innerHTML += message+"\n\n";
		};
		// Trigger the bug: push a state after 1s, then go back after 2s.
		// On affected Chrome versions the resulting popstate carries
		// {state:'new'} instead of null.
		document.getElementById('bug').onclick = function(){
			setTimeout(function(){
				history.pushState({state:'new'},'New State','?new');
			},1e3);
			setTimeout(function(){
				history.back();
			},2e3);
		};
		// Strip any query/hash so the demo can be re-run from a clean URL.
		document.getElementById('reset').onclick = function(){
			document.location.href = document.location.href.replace(/[\#\?].*/,"");
		};
	})();
	</script>
</body>
</html>
// Copyright 2022 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/download/bubble/download_bubble_controller.h"

#include "base/files/file_path.h"
#include "base/metrics/histogram_functions.h"
#include "base/notreached.h"
#include "base/time/time.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/content_index/content_index_provider_impl.h"
#include "chrome/browser/download/bubble/download_bubble_prefs.h"
#include "chrome/browser/download/bubble/download_display_controller.h"
#include "chrome/browser/download/chrome_download_manager_delegate.h"
#include "chrome/browser/download/download_core_service.h"
#include "chrome/browser/download/download_core_service_factory.h"
#include "chrome/browser/download/download_item_model.h"
#include "chrome/browser/download/offline_item_model_manager.h"
#include "chrome/browser/download/offline_item_model_manager_factory.h"
#include "chrome/browser/download/offline_item_utils.h"
#include "chrome/browser/offline_items_collection/offline_content_aggregator_factory.h"
#include "chrome/browser/profiles/profile_key.h"
#include "chrome/browser/safe_browsing/download_protection/download_protection_service.h"
#include "chrome/browser/safe_browsing/safe_browsing_service.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_finder.h"
#include "components/download/public/common/download_item.h"
#include "components/download/public/common/download_stats.h"
#include "components/offline_items_collection/core/offline_content_aggregator.h"
#include "content/public/browser/download_manager.h"

using DownloadCreationType = ::download::DownloadItem::DownloadCreationType;

namespace {

// Only downloads started within this many days are shown in the bubble.
constexpr int kShowDownloadsInBubbleForNumDays = 1;
// Hard cap on the number of entries displayed.
constexpr int kMaxDownloadsToShow = 100;

// Returns true when |candidate| carries the ContentId |to_find|.
bool FindOfflineItemByContentId(const ContentId& to_find,
                                const OfflineItem& candidate) {
  return candidate.id == to_find;
}

// A model is "recent" if it started after |cutoff_time|, or has no start time
// yet and is still in flight.
bool DownloadUIModelIsRecent(const DownloadUIModel* model,
                             base::Time cutoff_time) {
  return ((model->GetStartTime().is_null() && !model->IsDone()) ||
          model->GetStartTime() > cutoff_time);
}

using DownloadUIModelPtrList = std::list<DownloadUIModelPtr>;

// Sorting order is 1) Active in-progress downloads, 2) Paused in-progress
// downloads, 3) Other downloads
int GetSortOrder(DownloadUIModel* a) {
  if (a->GetState() == download::DownloadItem::IN_PROGRESS) {
    return a->IsPaused() ? 2 : 1;
  }
  return 3;
}

// Orders list iterators by GetSortOrder(), then newest-first within a bucket.
struct StartTimeComparator {
  bool operator()(const DownloadUIModelPtrList::iterator& a_iter,
                  const DownloadUIModelPtrList::iterator& b_iter) const {
    DownloadUIModel* a = (*a_iter).get();
    DownloadUIModel* b = (*b_iter).get();
    int a_sort_order = GetSortOrder(a);
    int b_sort_order = GetSortOrder(b);
    if (a_sort_order < b_sort_order) {
      return true;
    } else if (a_sort_order > b_sort_order) {
      return false;
    } else {
      // For the same sort order, sub-order by reverse chronological order.
      return (a->GetStartTime() > b->GetStartTime());
    }
  }
};
using SortedDownloadUIModelSet =
    std::multiset<DownloadUIModelPtrList::iterator, StartTimeComparator>;

// Moves |model| into |models_aggregate| if it is bubble-worthy and recent.
// Returns whether the model was added.
bool AddModelIfRequired(DownloadUIModelPtr model,
                        base::Time cutoff_time,
                        std::vector<DownloadUIModelPtr>& models_aggregate) {
  if (model->ShouldShowInBubble() &&
      DownloadUIModelIsRecent(model.get(), cutoff_time)) {
    models_aggregate.push_back(std::move(model));
    return true;
  }
  return false;
}

// True once the display cap has been reached.
bool ShouldStopAddingModels(std::vector<DownloadUIModelPtr>& models_aggregate) {
  return (models_aggregate.size() >= kMaxDownloadsToShow);
}

}  // namespace

DownloadBubbleUIController::DownloadBubbleUIController(Browser* browser)
    : browser_(browser),
      profile_(browser->profile()),
      download_manager_(profile_->GetDownloadManager()),
      download_notifier_(download_manager_, this),
      aggregator_(OfflineContentAggregatorFactory::GetForKey(
          profile_->GetProfileKey())),
      offline_manager_(
          OfflineItemModelManagerFactory::GetForBrowserContext(profile_)) {
  // In incognito, also observe the original profile's download manager so its
  // downloads surface in the same bubble.
  if (profile_->IsOffTheRecord()) {
    Profile* original_profile = profile_->GetOriginalProfile();
    original_notifier_ = std::make_unique<download::AllDownloadItemNotifier>(
        original_profile->GetDownloadManager(), this);
  }
  observation_.Observe(aggregator_.get());
}

DownloadBubbleUIController::~DownloadBubbleUIController() = default;

void DownloadBubbleUIController::HideDownloadUi() {
  display_controller_->HideToolbarButton();
}

void DownloadBubbleUIController::HandleButtonPressed() {
  display_controller_->HandleButtonPressed();
}

// Appends |item| to offline_items_ when it belongs in this profile's bubble.
// Filters out: cross-profile items, regular downloads (tracked separately),
// cancelled items, and content-index entries. Returns whether it was added.
bool DownloadBubbleUIController::MaybeAddOfflineItem(const OfflineItem& item,
                                                     bool is_new) {
  if (profile_->IsOffTheRecord() != item.is_off_the_record)
    return false;

  if (OfflineItemUtils::IsDownload(item.id))
    return false;

  if (item.state == OfflineItemState::CANCELLED)
    return false;

  if (item.id.name_space == ContentIndexProviderImpl::kProviderNamespace)
    return false;

  std::unique_ptr<DownloadUIModel> model(
      std::make_unique<OfflineItemModel>(offline_manager_, item));
  if (!model->ShouldShowInBubble())
    return false;

  offline_items_.push_back(item);
  // Fresh items should appear in the partial view, so mark them un-actioned.
  if (is_new) {
    model->SetActionedOn(false);
  }
  return true;
}

void DownloadBubbleUIController::MaybeAddOfflineItems(
    base::OnceCallback<void()> callback,
    bool is_new,
    const OfflineItemList& offline_items) {
  for (const OfflineItem& item : offline_items) {
    MaybeAddOfflineItem(item, is_new);
  }
  std::move(callback).Run();
}

// Fetches all existing offline items asynchronously; |callback| fires after
// they have been filtered into offline_items_.
void DownloadBubbleUIController::InitOfflineItems(
    DownloadDisplayController* display_controller,
    base::OnceCallback<void()> callback) {
  display_controller_ = display_controller;
  aggregator_->GetAllItems(base::BindOnce(
      &DownloadBubbleUIController::MaybeAddOfflineItems,
      weak_factory_.GetWeakPtr(), std::move(callback), /*is_new=*/false));
}

const OfflineItemList& DownloadBubbleUIController::GetOfflineItems() {
  // Drop stale entries before handing the list out.
  PruneOfflineItems();
  return offline_items_;
}

// Collects downloads from this profile's manager and, in incognito, from the
// original profile's manager as well.
const std::vector<download::DownloadItem*>
DownloadBubbleUIController::GetDownloadItems() {
  std::vector<download::DownloadItem*> download_items;
  download_manager_->GetAllDownloads(&download_items);
  if (original_notifier_) {
    original_notifier_->GetManager()->GetAllDownloads(&download_items);
  }
  return download_items;
}

void DownloadBubbleUIController::OnManagerGoingDown(
    content::DownloadManager* manager) {
  if (manager == download_manager_) {
    download_manager_ = nullptr;
  }
}

void DownloadBubbleUIController::OnContentProviderGoingDown() {
  observation_.Reset();
}

// Provider callback for newly created offline items. Pops the bubble's
// details only when something in-progress arrived and this browser is the
// profile's most recently active window.
void DownloadBubbleUIController::OnItemsAdded(
    const OfflineContentProvider::OfflineItemList& items) {
  bool any_new = false;
  bool any_in_progress = false;
  for (const OfflineItem& item : items) {
    if (MaybeAddOfflineItem(item, /*is_new=*/true)) {
      if (item.state == OfflineItemState::IN_PROGRESS) {
        any_in_progress = true;
      }
      any_new = true;
    }
  }
  if (any_new) {
    display_controller_->OnNewItem(/*show_details=*/(
        any_in_progress &&
        (browser_ == chrome::FindLastActiveWithProfile(profile_.get()))));
  }
}

void DownloadBubbleUIController::OnNewItem(download::DownloadItem* item,
                                           bool show_details) {
  // Mark the download un-actioned so the partial view will include it.
  std::make_unique<DownloadItemModel>(item)->SetActionedOn(false);
  display_controller_->OnNewItem(
      (item->GetState() == download::DownloadItem::IN_PROGRESS) &&
      show_details);
}

bool DownloadBubbleUIController::ShouldShowIncognitoIcon(
    const DownloadUIModel* model) const {
  return download::IsDownloadBubbleV2Enabled(profile_) &&
         model->GetDownloadItem() && model->GetDownloadItem()->IsOffTheRecord();
}

void DownloadBubbleUIController::OnItemRemoved(const ContentId& id) {
  if (OfflineItemUtils::IsDownload(id))
    return;
  offline_items_.erase(
      std::remove_if(offline_items_.begin(), offline_items_.end(),
                     [&id](const OfflineItem& candidate) {
                       return FindOfflineItemByContentId(id, candidate);
                     }),
      offline_items_.end());
  offline_manager_->RemoveOfflineItemModelData(id);
  display_controller_->OnRemovedItem(id);
}

void DownloadBubbleUIController::OnDownloadRemoved(
    content::DownloadManager* manager,
    download::DownloadItem* item) {
  // A removed download no longer needs partial-view attention.
  std::make_unique<DownloadItemModel>(item)->SetActionedOn(true);
  const ContentId& id = OfflineItemUtils::GetContentIdForDownload(item);
  display_controller_->OnRemovedItem(id);
}

void DownloadBubbleUIController::OnItemUpdated(
    const OfflineItem& item,
    const absl::optional<UpdateDelta>& update_delta) {
  // Update item
  offline_items_.erase(
      std::remove_if(offline_items_.begin(), offline_items_.end(),
                     [&item](const OfflineItem& candidate) {
                       return FindOfflineItemByContentId(item.id, candidate);
                     }),
      offline_items_.end());
  bool was_added = MaybeAddOfflineItem(item, /*is_new=*/false);
  display_controller_->OnUpdatedItem(
      std::make_unique<OfflineItemModel>(offline_manager_, item)->IsDone(),
      was_added &&
          (browser_ == chrome::FindLastActiveWithProfile(profile_.get())));
}

void DownloadBubbleUIController::OnDownloadUpdated(
    content::DownloadManager* manager,
    download::DownloadItem* item) {
  // manager can be different from download_notifier_ when the current profile
  // is off the record.
  if (manager != download_notifier_.GetManager()) {
    display_controller_->OnUpdatedItem(item->IsDone(),
                                       /*show_details_if_done=*/false);
    return;
  }
  bool show_details_if_done =
      std::make_unique<DownloadItemModel>(item)->ShouldShowInBubble() &&
      (browser_ == chrome::FindLastActiveWithProfile(profile_.get()));
  display_controller_->OnUpdatedItem(item->IsDone(), show_details_if_done);
}

// Removes offline items older than the display window, marking each pruned
// entry actioned so it will not resurface.
void DownloadBubbleUIController::PruneOfflineItems() {
  base::Time cutoff_time =
      base::Time::Now() - base::Days(kShowDownloadsInBubbleForNumDays);
  for (auto item_iter = offline_items_.begin();
       item_iter != offline_items_.end();) {
    std::unique_ptr<DownloadUIModel> offline_model =
        std::make_unique<OfflineItemModel>(offline_manager_, *item_iter);
    if (!DownloadUIModelIsRecent(offline_model.get(), cutoff_time)) {
      offline_model->SetActionedOn(true);
      item_iter = offline_items_.erase(item_iter);
    } else {
      item_iter++;
    }
  }
}

// Aggregates offline items and downloads into UI models, stopping early once
// kMaxDownloadsToShow recent entries have been gathered.
std::vector<DownloadUIModelPtr>
DownloadBubbleUIController::GetAllItemsToDisplay() {
  base::Time cutoff_time =
      base::Time::Now() - base::Days(kShowDownloadsInBubbleForNumDays);
  std::vector<DownloadUIModelPtr> models_aggregate;
  for (const OfflineItem& item : GetOfflineItems()) {
    if (AddModelIfRequired(
            OfflineItemModel::Wrap(
                offline_manager_, item,
                std::make_unique<DownloadUIModel::BubbleStatusTextBuilder>()),
            cutoff_time, models_aggregate) &&
        ShouldStopAddingModels(models_aggregate)) {
      return models_aggregate;
    }
  }
  for (download::DownloadItem* item : GetDownloadItems()) {
    if (AddModelIfRequired(
            DownloadItemModel::Wrap(
                item,
                std::make_unique<DownloadUIModel::BubbleStatusTextBuilder>()),
            cutoff_time, models_aggregate) &&
        ShouldStopAddingModels(models_aggregate)) {
      return models_aggregate;
    }
  }
  return models_aggregate;
}

// Produces the sorted model list for either the main view (everything; marks
// items actioned) or the partial view (only not-yet-actioned items).
std::vector<DownloadUIModelPtr> DownloadBubbleUIController::GetDownloadUIModels(
    bool is_main_view) {
  // Prune just to keep the list of offline entries small.
  PruneOfflineItems();
  // Aggregate downloads and offline items
  std::vector<DownloadUIModelPtr> models_aggregate = GetAllItemsToDisplay();
  // Store list of DownloadUIModelPtrs. Sort list iterators in a set, as a set
  // does not allow move semantics over unique_ptr, preventing us from putting
  // DownloadUIModelPtr directly in the set.
  DownloadUIModelPtrList filtered_models_list;
  SortedDownloadUIModelSet sorted_ui_model_iters;
  for (auto& model : models_aggregate) {
    // Partial view entries are removed if viewed on the main view.
    if (is_main_view || !model->WasActionedOn()) {
      if (is_main_view) {
        model->SetActionedOn(true);
      }
      filtered_models_list.push_front(std::move(model));
      sorted_ui_model_iters.insert(filtered_models_list.begin());
    }
  }
  // Convert set iterators to sorted vector.
  std::vector<DownloadUIModelPtr> models_return_arr;
  for (const auto& model_iter : sorted_ui_model_iters) {
    models_return_arr.push_back(std::move((*model_iter)));
  }
  return models_return_arr;
}

std::vector<DownloadUIModelPtr> DownloadBubbleUIController::GetMainView() {
  // Record how long the partial view lingered before the user opened the
  // full view, then clear the timestamp.
  if (last_partial_view_shown_time_.has_value()) {
    base::UmaHistogramLongTimes(
        "Download.Bubble.PartialToFullViewLatency",
        base::Time::Now() - (*last_partial_view_shown_time_));
    last_partial_view_shown_time_ = absl::nullopt;
  }
  std::vector<DownloadUIModelPtr> list =
      GetDownloadUIModels(/*is_main_view=*/true);
  base::UmaHistogramCounts100("Download.Bubble.FullViewSize", list.size());
  return list;
}

std::vector<DownloadUIModelPtr> DownloadBubbleUIController::GetPartialView() {
  last_partial_view_shown_time_ = absl::make_optional(base::Time::Now());
  std::vector<DownloadUIModelPtr> list =
      GetDownloadUIModels(/*is_main_view=*/false);
  base::UmaHistogramCounts100("Download.Bubble.PartialViewSize", list.size());
  return list;
}

// Handles KEEP/DISCARD on a warned download; mixed-content warnings execute
// directly, otherwise the download may first be offered to Safe Browsing
// feedback.
void DownloadBubbleUIController::ProcessDownloadWarningButtonPress(
    DownloadUIModel* model,
    DownloadCommands::Command command) {
  DownloadCommands commands(model->GetWeakPtr());
  DCHECK(command == DownloadCommands::KEEP ||
         command == DownloadCommands::DISCARD);
  if (model->IsMixedContent())
    commands.ExecuteCommand(command);
  else
    MaybeSubmitDownloadToFeedbackService(model, command);
}

// Central dispatch for all bubble row-level button presses.
void DownloadBubbleUIController::ProcessDownloadButtonPress(
    DownloadUIModel* model,
    DownloadCommands::Command command) {
  DownloadCommands commands(model->GetWeakPtr());
  switch (command) {
    case DownloadCommands::KEEP:
    case DownloadCommands::DISCARD:
      ProcessDownloadWarningButtonPress(model, command);
      break;
    case DownloadCommands::REVIEW:
#if 0
      model->ReviewScanningVerdict(
          browser_->tab_strip_model()->GetActiveWebContents());
#endif
      break;
    case DownloadCommands::RETRY:
      RetryDownload(model, command);
      break;
    case DownloadCommands::CANCEL:
      // Cancelled entries should not reappear in the partial view.
      model->SetActionedOn(true);
      [[fallthrough]];
    case DownloadCommands::DEEP_SCAN:
    case DownloadCommands::BYPASS_DEEP_SCANNING:
    case DownloadCommands::RESUME:
    case DownloadCommands::PAUSE:
    case DownloadCommands::OPEN_WHEN_COMPLETE:
    case DownloadCommands::SHOW_IN_FOLDER:
    case DownloadCommands::ALWAYS_OPEN_TYPE:
      commands.ExecuteCommand(command);
      break;
    default:
      NOTREACHED() << "Unexpected button pressed on download bubble: "
                   << command;
  }
}

// Executes |command| directly unless the download is eligible for (and
// accepted by) the Safe Browsing feedback service.
void DownloadBubbleUIController::MaybeSubmitDownloadToFeedbackService(
    DownloadUIModel* model,
    DownloadCommands::Command command) {
  DownloadCommands commands(model->GetWeakPtr());
  if (!model->ShouldAllowDownloadFeedback() ||
      !SubmitDownloadToFeedbackService(model, command)) {
    commands.ExecuteCommand(command);
  }
}

// Returns whether the feedback service took ownership of the action; false
// means the caller must execute the command itself.
bool DownloadBubbleUIController::SubmitDownloadToFeedbackService(
    DownloadUIModel* model,
    DownloadCommands::Command command) const {
#if BUILDFLAG(FULL_SAFE_BROWSING)
  auto* const sb_service = g_browser_process->safe_browsing_service();
  if (!sb_service)
    return false;
  auto* const dp_service = sb_service->download_protection_service();
  if (!dp_service)
    return false;
  // TODO(shaktisahu): Enable feedback service for offline item.
  return !model->GetDownloadItem() ||
         dp_service->MaybeBeginFeedbackForDownload(
             profile_, model->GetDownloadItem(), command);
#else
  NOTREACHED();
  return false;
#endif
}

// Re-issues an interrupted download from its last URL, hiding the bubble and
// recording the retry reason.
void DownloadBubbleUIController::RetryDownload(
    DownloadUIModel* model,
    DownloadCommands::Command command) {
  DCHECK(command == DownloadCommands::RETRY);
  display_controller_->HideBubble();
  RecordDownloadRetry(
      OfflineItemUtils::ConvertFailStateToDownloadInterruptReason(
          model->GetLastFailState()));
  net::NetworkTrafficAnnotationTag traffic_annotation =
      net::DefineNetworkTrafficAnnotation("download_bubble_retry_download", R"(
        semantics {
          sender: "The download bubble"
          description: "Kick off retrying an interrupted download."
          trigger:
            "The user selects the retry button for an interrupted download on "
            "the downloads bubble."
          data: "None"
          destination: WEBSITE
        }
        policy {
          cookies_allowed: YES
          cookies_store: "user"
          setting:
            "This feature cannot be disabled by settings, but it's only "
            "triggered by user request."
          policy_exception_justification: "Not implemented."
        })");
  // Use the last URL in the chain like resumption does.
  auto download_url_params = std::make_unique<download::DownloadUrlParameters>(
      model->GetURL(), traffic_annotation);
  // Set to false because user interaction is needed.
  download_url_params->set_content_initiated(false);
  download_url_params->set_download_source(
      download::DownloadSource::RETRY_FROM_BUBBLE);
  download_manager_->DownloadUrl(std::move(download_url_params));
}

void DownloadBubbleUIController::ScheduleCancelForEphemeralWarning(
    const std::string& guid) {
  DownloadCoreService* download_core_service =
      DownloadCoreServiceFactory::GetForBrowserContext(profile_);
  if (!download_core_service)
    return;
  ChromeDownloadManagerDelegate* delegate =
      download_core_service->GetDownloadManagerDelegate();
  if (delegate)
    delegate->ScheduleCancelForEphemeralWarning(guid);
}
module MatzBot::Commands require 'open-uri' require 'rexml/document' GIT_URL = 'http://git.rubini.us/?p=code;a=atom' hup_proc = lambda { trap("HUP", "IGNORE") trap("HUP", hup_proc) } trap("HUP", hup_proc) abrt_proc = lambda { trap("ABRT", "IGNORE") trap("ABRT", abrt_proc) } trap("ABRT", abrt_proc) def update_git data = open(GIT_URL).read doc = REXML::Document.new(data) last_hash = session[:git_last_hash] person = nil top_hash = nil REXML::XPath.each(doc, "//entry") do |entry| title = REXML::XPath.first(entry, "./title") link = REXML::XPath.first(entry, "./link") name = REXML::XPath.first(entry, "./author/name") hash = link.attributes['href'].split("=").last top_hash = hash if top_hash.nil? break if hash == last_hash # we need to put the hast already in now, otherwise it might run the build twice. session[:git_last_hash] = top_hash person = name.text build = IO.popen("~/continuous/bin/rubinius.zsh #{hash}", "r+") { |p| p.read } unless build.empty? say "#{person}: #{hash[0..8]}; #{build}" #build.split("\n").map{|x| say " * " << x} end break # only run it for the very last commit end end Signal.trap("USR2") do update_git end end
// Vue mixin factory: installs bookshelf state and methods onto the supplied
// component options object ({data, methods, computed}).
module.exports = function ({data, methods, computed}) {
  // Books currently on the user's shelf (newest first; see bookshelf__update).
  data.bookshelf = []
  // Free-text filter consumed by bookshelf__view.
  data.bookshelf__search_term = ''

  // Fetch a user's bookshelf from the server; returns the request promise.
  // NOTE(review): this calls a bare `req` while every other method here uses
  // `w.req` -- confirm which helper is actually in scope.
  methods.bookshelf__get = function (user_id) {
    let vm = this
    return req({
      url: '/bookshelf/' + user_id,
      cookies: true,
      json: true
    })
  }

  // Index of the entry whose book id matches, or -1 when absent.
  methods.bookshelf__findId = function (book_id) {
    let vm = this
    return vm.bookshelf.findIndex(function (b) {
      return b.book.id === book_id
    })
  }

  // Add a book to the signed-in user's shelf, then merge the server response
  // into local state. No-op for anonymous visitors.
  methods.bookshelf__add = function (book_id) {
    let vm = this
    if (!vm.is_user) { return }
    w.req({
      method: 'post',
      url: '/bookshelf__add',
      data: {book_id, user_id: vm.user._id},
      cookies: true,
      json: true
    }).then(function (res) {
      vm.bookshelf__update(res)
    })
  }

  // Remove a book from the shelf; local state is only spliced when the server
  // reported no error.
  methods.bookshelf__remove = function (book_id) {
    let vm = this
    if (!vm.is_user) { return }
    w.req({
      method: 'post',
      url: '/bookshelf__remove',
      data: {book_id, user_id: vm.user._id},
      cookies: true,
      json: true
    }).then(function (res) {
      if (res.err !== undefined) { return }
      var i = vm.bookshelf__findId(book_id)
      if (i !== -1) { vm.bookshelf.splice(i, 1) }
    })
  }

  // Insert a new shelf entry at the front, or replace an existing entry in
  // place (Vue.set keeps the array reactive for index assignment).
  methods.bookshelf__update = function (b) {
    if (b === undefined) { return }
    let vm = this
    var i = vm.bookshelf__findId(b.book.id)
    if (i === -1) { vm.bookshelf.unshift(b) }
    if (i !== -1) { Vue.set(vm.bookshelf, i, b) }
  }

  // True when the book is already on the shelf.
  methods.bookshelf__is_in_array = function (book_id) {
    let vm = this
    return vm.bookshelf__findId(book_id) !== -1
  }

  // Filter `books` by the search term, matching against title, subtitle or
  // the joined authors list (case-insensitive).
  // NOTE(review): the raw term is used as a regex pattern, so regex
  // metacharacters in user input will change or break the match -- consider
  // escaping before shipping.
  methods.bookshelf__view = function (books, search_term) {
    let vm = this
    var search_term = search_term.toLowerCase().trim()
    var search_reg = search_term
    // var search_reg = search_term.split('').join('.*')
    return books.filter(function (b) {
      var book = b.book
      if (search_term.length === 0) { return true }
      // Accept the book as soon as any one field matches.
      const search_result = ['title', 'subtitle', 'authors'].reduce(function (bool, field) {
        if (bool === true) { return true }
        if (book[field] === undefined) { return false }
        if (field === 'authors') {
          return book.authors.join(', ').toLowerCase().match(search_reg) !== null
        }
        return book[field].toLowerCase().match(search_reg) !== null
      }, false)
      return search_result
    })
  }

  // Comma-joined author list for display; empty string when absent.
  methods.bookshelf_authors = function (book) {
    if (book.book.authors === undefined) { return '' }
    return book.book.authors.join(', ')
  }
}
<?php

namespace Predis\Replication;

use Predis\NotSupportedException;
use Predis\Command\CommandInterface;

/**
 * Defines a strategy for master/slave replication.
 *
 * @author Daniele Alessandri <suppakilla@gmail.com>
 */
class ReplicationStrategy
{
    protected $disallowed;      // command ID => true, commands forbidden in replication mode
    protected $readonly;        // command ID => true|callable, read-only commands
    protected $readonlySHA1;    // Lua script SHA1 => true|callable, read-only scripts

    /**
     * Initializes the default disallowed and read-only command tables.
     */
    public function __construct()
    {
        $this->disallowed = $this->getDisallowedOperations();
        $this->readonly = $this->getReadOnlyOperations();
        $this->readonlySHA1 = array();
    }

    /**
     * Returns if the specified command performs a read-only operation
     * against a key stored on Redis.
     *
     * @param CommandInterface $command Instance of Redis command.
     * @return Boolean
     *
     * @throws NotSupportedException When the command is disallowed in
     *                               replication mode.
     */
    public function isReadOperation(CommandInterface $command)
    {
        if (isset($this->disallowed[$id = $command->getId()])) {
            throw new NotSupportedException("The command $id is not allowed in replication mode");
        }

        if (isset($this->readonly[$id])) {
            // An entry is either `true` (always read-only) or a callable that
            // decides at runtime based on the command's arguments.
            if (true === $readonly = $this->readonly[$id]) {
                return true;
            }

            return call_user_func($readonly, $command);
        }

        // EVAL carries the script body (hash it), EVALSHA carries the SHA1
        // directly; both are looked up in the registered read-only scripts.
        if (($eval = $id === 'EVAL') || $id === 'EVALSHA') {
            $sha1 = $eval ? sha1($command->getArgument(0)) : $command->getArgument(0);

            if (isset($this->readonlySHA1[$sha1])) {
                if (true === $readonly = $this->readonlySHA1[$sha1]) {
                    return true;
                }

                return call_user_func($readonly, $command);
            }
        }

        return false;
    }

    /**
     * Returns if the specified command is disallowed in a master/slave
     * replication context.
     *
     * @param CommandInterface $command Instance of Redis command.
     * @return Boolean
     */
    public function isDisallowedOperation(CommandInterface $command)
    {
        return isset($this->disallowed[$command->getId()]);
    }

    /**
     * Checks if a SORT command is a readable operation by parsing the arguments
     * array of the specified command instance.
     *
     * @param CommandInterface $command Instance of Redis command.
     * @return Boolean
     */
    protected function isSortReadOnly(CommandInterface $command)
    {
        $arguments = $command->getArguments();

        // SORT with a single argument (the key) cannot write; otherwise it is
        // a write when the second-to-last argument is the STORE modifier.
        return ($c = count($arguments)) === 1 ? true : $arguments[$c - 2] !== 'STORE';
    }

    /**
     * Marks a command as a read-only operation. When the behaviour of a
     * command can be decided only at runtime depending on its arguments,
     * a callable object can be provided to dynamically check if the passed
     * instance of a command performs write operations or not.
     *
     * @param string $commandID ID of the command.
     * @param mixed $readonly A boolean or a callable object.
     */
    public function setCommandReadOnly($commandID, $readonly = true)
    {
        $commandID = strtoupper($commandID);

        if ($readonly) {
            $this->readonly[$commandID] = $readonly;
        } else {
            // A falsy value removes the command from the read-only set.
            unset($this->readonly[$commandID]);
        }
    }

    /**
     * Marks a Lua script for EVAL and EVALSHA as a read-only operation. When
     * the behaviour of a script can be decided only at runtime depending on
     * its arguments, a callable object can be provided to dynamically check
     * if the passed instance of EVAL or EVALSHA performs write operations or
     * not.
     *
     * @param string $script Body of the Lua script.
     * @param mixed $readonly A boolean or a callable object.
     */
    public function setScriptReadOnly($script, $readonly = true)
    {
        $sha1 = sha1($script);

        if ($readonly) {
            $this->readonlySHA1[$sha1] = $readonly;
        } else {
            unset($this->readonlySHA1[$sha1]);
        }
    }

    /**
     * Returns the default list of disallowed commands.
     *
     * @return array
     */
    protected function getDisallowedOperations()
    {
        return array(
            'SHUTDOWN'          => true,
            'INFO'              => true,
            'DBSIZE'            => true,
            'LASTSAVE'          => true,
            'CONFIG'            => true,
            'MONITOR'           => true,
            'SLAVEOF'           => true,
            'SAVE'              => true,
            'BGSAVE'            => true,
            'BGREWRITEAOF'      => true,
            'SLOWLOG'           => true,
        );
    }

    /**
     * Returns the default list of commands performing read-only operations.
     *
     * @return array
     */
    protected function getReadOnlyOperations()
    {
        return array(
            'EXISTS'            => true,
            'TYPE'              => true,
            'KEYS'              => true,
            'SCAN'              => true,
            'RANDOMKEY'         => true,
            'TTL'               => true,
            'GET'               => true,
            'MGET'              => true,
            'SUBSTR'            => true,
            'STRLEN'            => true,
            'GETRANGE'          => true,
            'GETBIT'            => true,
            'LLEN'              => true,
            'LRANGE'            => true,
            'LINDEX'            => true,
            'SCARD'             => true,
            'SISMEMBER'         => true,
            'SINTER'            => true,
            'SUNION'            => true,
            'SDIFF'             => true,
            'SMEMBERS'          => true,
            'SSCAN'             => true,
            'SRANDMEMBER'       => true,
            'ZRANGE'            => true,
            'ZREVRANGE'         => true,
            'ZRANGEBYSCORE'     => true,
            'ZREVRANGEBYSCORE'  => true,
            'ZCARD'             => true,
            'ZSCORE'            => true,
            'ZCOUNT'            => true,
            'ZRANK'             => true,
            'ZREVRANK'          => true,
            'ZSCAN'             => true,
            'HGET'              => true,
            'HMGET'             => true,
            'HEXISTS'           => true,
            'HLEN'              => true,
            'HKEYS'             => true,
            'HVALS'             => true,
            'HGETALL'           => true,
            'HSCAN'             => true,
            'PING'              => true,
            'AUTH'              => true,
            'SELECT'            => true,
            'ECHO'              => true,
            'QUIT'              => true,
            'OBJECT'            => true,
            'BITCOUNT'          => true,
            'TIME'              => true,
            // SORT is read-only unless it carries a STORE clause.
            'SORT'              => array($this, 'isSortReadOnly'),
        );
    }
}
package statsd

import "time"

// A NoopClient is a client that does nothing. Every method discards its
// arguments and returns a nil error, so it can be swapped in wherever a
// Statter is expected (e.g. to disable metrics in tests or when no statsd
// server is configured).
type NoopClient struct{}

// Close closes the connection and cleans up. Always a no-op.
func (s *NoopClient) Close() error {
	return nil
}

// Inc increments a statsd count type.
// stat is a string name for the metric.
// value is the integer value
// rate is the sample rate (0.0 to 1.0)
func (s *NoopClient) Inc(stat string, value int64, rate float32) error {
	return nil
}

// Dec decrements a statsd count type.
// stat is a string name for the metric.
// value is the integer value.
// rate is the sample rate (0.0 to 1.0).
func (s *NoopClient) Dec(stat string, value int64, rate float32) error {
	return nil
}

// Gauge submits/Updates a statsd gauge type.
// stat is a string name for the metric.
// value is the integer value.
// rate is the sample rate (0.0 to 1.0).
func (s *NoopClient) Gauge(stat string, value int64, rate float32) error {
	return nil
}

// GaugeDelta submits a delta to a statsd gauge.
// stat is the string name for the metric.
// value is the (positive or negative) change.
// rate is the sample rate (0.0 to 1.0).
func (s *NoopClient) GaugeDelta(stat string, value int64, rate float32) error {
	return nil
}

// Timing submits a statsd timing type.
// stat is a string name for the metric.
// delta is the time duration value in milliseconds
// rate is the sample rate (0.0 to 1.0).
func (s *NoopClient) Timing(stat string, delta int64, rate float32) error {
	return nil
}

// TimingDuration submits a statsd timing type.
// stat is a string name for the metric.
// delta is the timing value as time.Duration
// rate is the sample rate (0.0 to 1.0).
func (s *NoopClient) TimingDuration(stat string, delta time.Duration, rate float32) error {
	return nil
}

// Set submits a stats set type.
// stat is a string name for the metric.
// value is the string value
// rate is the sample rate (0.0 to 1.0).
func (s *NoopClient) Set(stat string, value string, rate float32) error {
	return nil
}

// SetInt submits a number as a stats set type.
// convenience method for Set with number.
// stat is a string name for the metric.
// value is the integer value
// rate is the sample rate (0.0 to 1.0).
func (s *NoopClient) SetInt(stat string, value int64, rate float32) error {
	return nil
}

// Raw formats the statsd event data, handles sampling, prepares it,
// and sends it to the server. Always a no-op here.
// stat is the string name for the metric.
// value is the preformatted "raw" value string.
// rate is the sample rate (0.0 to 1.0).
func (s *NoopClient) Raw(stat string, value string, rate float32) error {
	return nil
}

// SetPrefix sets/updates the statsd client prefix. No-op.
func (s *NoopClient) SetPrefix(prefix string) {}

// NewSubStatter returns a SubStatter with appended prefix.
// The prefix is discarded; a fresh NoopClient is returned.
func (s *NoopClient) NewSubStatter(prefix string) SubStatter {
	return &NoopClient{}
}

// SetSamplerFunc sets the sampler function. No-op.
func (s *NoopClient) SetSamplerFunc(sampler SamplerFunc) {}

// NewNoopClient returns a pointer to a new NoopClient, and an error (always
// nil, just supplied to support api convention).
// Use variadic arguments to support identical format as NewClient, or a more
// conventional no argument form.
func NewNoopClient(a ...interface{}) (Statter, error) {
	return &NoopClient{}, nil
}

// NewNoop is a compatibility alias for NewNoopClient
var NewNoop = NewNoopClient
$(document).ready(function() { function filterPath(string) { return string .replace(/^\//,'') .replace(/(index|default).[a-zA-Z]{3,4}$/,'') .replace(/\/$/,''); } var locationPath = filterPath(location.pathname); $('a[href*=#]').each(function() { var thisPath = filterPath(this.pathname) || locationPath; if ( locationPath == thisPath && (location.hostname == this.hostname || !this.hostname) && this.hash.replace(/#/,'') ) { var $target = $(this.hash), target = this.hash; if (target) { $(this).click(function(event) { if (!$(this.hash).offset()) { return; } event.preventDefault(); position = $(this.hash).offset().top; $('html,body').animate({scrollTop: position}, 400, function() { location.hash = target; }); }); } } }); });
package org.apache.cordova.media;

import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.media.MediaRecorder;
import android.os.Environment;
import android.util.Log;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

/**
 * This class implements the audio playback and recording capabilities used by Cordova.
 * It is called by the AudioHandler Cordova class.
 * Only one file can be played or recorded per class instance.
 *
 * Local audio files must reside in one of two places:
 *      android_asset:      file name must start with /android_asset/sound.mp3
 *      sdcard:             file name is just sound.mp3
 *
 * State/status changes are pushed to the JavaScript side via
 * handler.webView.sendJavascript(...) calls to Media.onStatus().
 */
public class AudioPlayer implements OnCompletionListener, OnPreparedListener, OnErrorListener {

    // AudioPlayer modes
    public enum MODE { NONE, PLAY, RECORD };

    // AudioPlayer states (ordinals are sent to the JavaScript layer)
    public enum STATE { MEDIA_NONE,
                        MEDIA_STARTING,
                        MEDIA_RUNNING,
                        MEDIA_PAUSED,
                        MEDIA_STOPPED,
                        MEDIA_LOADING
                      };

    private static final String LOG_TAG = "AudioPlayer";

    // AudioPlayer message ids
    private static int MEDIA_STATE = 1;
    private static int MEDIA_DURATION = 2;
    private static int MEDIA_POSITION = 3;
    private static int MEDIA_ERROR = 9;

    // Media error codes
    private static int MEDIA_ERR_NONE_ACTIVE    = 0;
    private static int MEDIA_ERR_ABORTED        = 1;
//    private static int MEDIA_ERR_NETWORK        = 2;
//    private static int MEDIA_ERR_DECODE         = 3;
//    private static int MEDIA_ERR_NONE_SUPPORTED = 4;

    private AudioHandler handler;           // The AudioHandler object
    private String id;                      // The id of this player (used to identify Media object in JavaScript)
    private MODE mode = MODE.NONE;          // Playback or Recording mode
    private STATE state = STATE.MEDIA_NONE; // State of recording or playback
    private String audioFile = null;        // File name to play or record to
    private float duration = -1;            // Duration of audio
    private MediaRecorder recorder = null;  // Audio recording object
    private String tempFile = null;         // Temporary recording file name
    private MediaPlayer player = null;      // Audio player object
    private boolean prepareOnly = true;     // playback after file prepare flag
    private int seekOnPrepared = 0;         // seek to this location once media is prepared

    /**
     * Constructor.
     *
     * @param handler           The audio handler object
     * @param id                The id of this audio player
     * @param file              File name to play or record to
     */
    public AudioPlayer(AudioHandler handler, String id, String file) {
        this.handler = handler;
        this.id = id;
        this.audioFile = file;
        this.recorder = new MediaRecorder();

        // Recordings land in a temp file first (external storage when
        // mounted, app cache otherwise) and are renamed in moveFile().
        if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
            this.tempFile = Environment.getExternalStorageDirectory().getAbsolutePath() + "/tmprecording.3gp";
        } else {
            this.tempFile = "/data/data/" + handler.cordova.getActivity().getPackageName() + "/cache/tmprecording.3gp";
        }
    }

    /**
     * Destroy player and stop audio playing or recording.
     */
    public void destroy() {
        // Stop any play or record
        if (this.player != null) {
            if ((this.state == STATE.MEDIA_RUNNING) || (this.state == STATE.MEDIA_PAUSED)) {
                this.player.stop();
                this.setState(STATE.MEDIA_STOPPED);
            }
            this.player.release();
            this.player = null;
        }
        if (this.recorder != null) {
            this.stopRecording();
            this.recorder.release();
            this.recorder = null;
        }
    }

    /**
     * Start recording the specified file.
     *
     * @param file              The name of the file
     */
    public void startRecording(String file) {
        switch (this.mode) {
        case PLAY:
            // One instance can either play or record, never both.
            Log.d(LOG_TAG, "AudioPlayer Error: Can't record in play mode.");
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', "+MEDIA_ERROR+", { \"code\":"+MEDIA_ERR_ABORTED+"});");
            break;
        case NONE:
            this.audioFile = file;
            this.recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            this.recorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT); // THREE_GPP);
            this.recorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT); //AMR_NB);
            // Record into the temp file; it is renamed to `file` on stop.
            this.recorder.setOutputFile(this.tempFile);
            try {
                this.recorder.prepare();
                this.recorder.start();
                this.setState(STATE.MEDIA_RUNNING);
                return;
            } catch (IllegalStateException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
            // Falling through to here means prepare()/start() failed.
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', "+MEDIA_ERROR+", { \"code\":"+MEDIA_ERR_ABORTED+"});");
            break;
        case RECORD:
            Log.d(LOG_TAG, "AudioPlayer Error: Already recording.");
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', "+MEDIA_ERROR+", { \"code\":"+MEDIA_ERR_ABORTED+"});");
        }
    }

    /**
     * Save temporary recorded file to specified name
     *
     * @param file              Destination file name (absolute, or relative to
     *                          external storage / app cache)
     */
    public void moveFile(String file) {
        /* this is a hack to save the file as the specified name */
        File f = new File(this.tempFile);

        // Resolve relative destinations against the same base directory the
        // temp file was created in.
        if (!file.startsWith("/")) {
            if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
                file = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + file;
            } else {
                file = "/data/data/" + handler.cordova.getActivity().getPackageName() + "/cache/" + file;
            }
        }

        String logMsg = "renaming " + this.tempFile + " to " + file;
        Log.d(LOG_TAG, logMsg);
        if (!f.renameTo(new File(file))) Log.e(LOG_TAG, "FAILED " + logMsg);
    }

    /**
     * Stop recording and save to the file specified when recording started.
     */
    public void stopRecording() {
        if (this.recorder != null) {
            try{
                if (this.state == STATE.MEDIA_RUNNING) {
                    this.recorder.stop();
                    this.setState(STATE.MEDIA_STOPPED);
                }
                this.recorder.reset();
                this.moveFile(this.audioFile);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    //==========================================================================
    // Playback
    //==========================================================================

    /**
     * Start or resume playing audio file.
     *
     * @param file              The name of the audio file.
     */
    public void startPlaying(String file) {
        if (this.readyPlayer(file) && this.player != null) {
            this.player.start();
            this.setState(STATE.MEDIA_RUNNING);
            this.seekOnPrepared = 0; //insures this is always reset
        } else {
            // Player not ready yet; onPrepared() will start playback.
            this.prepareOnly = false;
        }
    }

    /**
     * Seek or jump to a new time in the track.
     *
     * @param milliseconds      Target position; deferred to onPrepared() when
     *                          the player is not ready yet.
     */
    public void seekToPlaying(int milliseconds) {
        if (this.readyPlayer(this.audioFile)) {
            this.player.seekTo(milliseconds);
            Log.d(LOG_TAG, "Send a onStatus update for the new seek");
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_POSITION + ", " + milliseconds / 1000.0f + ");");
        } else {
            this.seekOnPrepared = milliseconds;
        }
    }

    /**
     * Pause playing.
     */
    public void pausePlaying() {
        // If playing, then pause
        if (this.state == STATE.MEDIA_RUNNING && this.player != null) {
            this.player.pause();
            this.setState(STATE.MEDIA_PAUSED);
        } else {
            Log.d(LOG_TAG, "AudioPlayer Error: pausePlaying() called during invalid state: " + this.state.ordinal());
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_ERROR + ", { \"code\":" + MEDIA_ERR_NONE_ACTIVE + "});");
        }
    }

    /**
     * Stop playing the audio file.
     * Implemented as pause + seek-to-start so the prepared player can be
     * restarted without reloading the source.
     */
    public void stopPlaying() {
        if ((this.state == STATE.MEDIA_RUNNING) || (this.state == STATE.MEDIA_PAUSED)) {
            this.player.pause();
            this.player.seekTo(0);
            Log.d(LOG_TAG, "stopPlaying is calling stopped");
            this.setState(STATE.MEDIA_STOPPED);
        } else {
            Log.d(LOG_TAG, "AudioPlayer Error: stopPlaying() called during invalid state: " + this.state.ordinal());
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_ERROR + ", { \"code\":" + MEDIA_ERR_NONE_ACTIVE + "});");
        }
    }

    /**
     * Callback to be invoked when playback of a media source has completed.
     *
     * @param player           The MediaPlayer that reached the end of the file
     */
    public void onCompletion(MediaPlayer player) {
        Log.d(LOG_TAG, "on completion is calling stopped");
        this.setState(STATE.MEDIA_STOPPED);
    }

    /**
     * Get current position of playback.
     *
     * @return                  position in msec or -1 if not playing
     */
    public long getCurrentPosition() {
        if ((this.state == STATE.MEDIA_RUNNING) || (this.state == STATE.MEDIA_PAUSED)) {
            int curPos = this.player.getCurrentPosition();
            // Position is reported to JavaScript in seconds.
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_POSITION + ", " + curPos / 1000.0f + ");");
            return curPos;
        } else {
            return -1;
        }
    }

    /**
     * Determine if playback file is streaming or local.
     * It is streaming if file name starts with "http://"
     *
     * @param file              The file name
     * @return                  T=streaming, F=local
     */
    public boolean isStreaming(String file) {
        if (file.contains("http://") || file.contains("https://")) {
            return true;
        } else {
            return false;
        }
    }

    /**
     * Get the duration of the audio file.
     *
     * @param file              The name of the audio file.
     * @return                  The duration in msec.
     *                              -1=can't be determined
     *                              -2=not allowed
     */
    public float getDuration(String file) {

        // Can't get duration of recording
        if (this.recorder != null) {
            return (-2); // not allowed
        }

        // If audio file already loaded and started, then return duration
        if (this.player != null) {
            return this.duration;
        }

        // If no player yet, then create one
        else {
            this.prepareOnly = true;
            this.startPlaying(file);

            // This will only return value for local, since streaming
            // file hasn't been read yet.
            return this.duration;
        }
    }

    /**
     * Callback to be invoked when the media source is ready for playback.
     *
     * @param player           The MediaPlayer that is ready for playback
     */
    public void onPrepared(MediaPlayer player) {
        // Listen for playback completion
        this.player.setOnCompletionListener(this);
        // seek to any location received while not prepared
        this.seekToPlaying(this.seekOnPrepared);
        // If start playing after prepared
        if (!this.prepareOnly) {
            this.player.start();
            this.setState(STATE.MEDIA_RUNNING);
            this.seekOnPrepared = 0; //reset only when played
        } else {
            this.setState(STATE.MEDIA_STARTING);
        }
        // Save off duration
        this.duration = getDurationInSeconds();
        // reset prepare only flag
        this.prepareOnly = true;

        // Send status notification to JavaScript
        this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_DURATION + "," + this.duration + ");");
    }

    /**
     * By default Android returns the length of audio in mills but we want seconds
     *
     * @return length of clip in seconds
     */
    private float getDurationInSeconds() {
        return (this.player.getDuration() / 1000.0f);
    }

    /**
     * Callback to be invoked when there has been an error during an asynchronous operation
     *  (other errors will throw exceptions at method call time).
     *
     * @param player           the MediaPlayer the error pertains to
     * @param arg1              the type of error that has occurred: (MEDIA_ERROR_UNKNOWN, MEDIA_ERROR_SERVER_DIED)
     * @param arg2              an extra code, specific to the error.
     */
    public boolean onError(MediaPlayer player, int arg1, int arg2) {
        Log.d(LOG_TAG, "AudioPlayer.onError(" + arg1 + ", " + arg2 + ")");

        // TODO: Not sure if this needs to be sent?
        this.player.stop();
        this.player.release();

        // Send error notification to JavaScript
        // NOTE(review): unlike every other onStatus call in this class, this
        // one omits the MEDIA_ERROR message id -- confirm the JS side accepts
        // a two-argument onStatus for errors.
        this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', { \"code\":" + arg1 + "});");
        return false;
    }

    /**
     * Set the state and send it to JavaScript.
     *
     * @param state             New state; a notification is sent only on change.
     */
    private void setState(STATE state) {
        if (this.state != state) {
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_STATE + ", " + state.ordinal() + ");");
        }
        this.state = state;
    }

    /**
     * Set the mode and send it to JavaScript.
     *
     * @param mode              New mode (PLAY/RECORD/NONE).
     */
    private void setMode(MODE mode) {
        if (this.mode != mode) {
            //mode is not part of the expected behavior, so no notification
            //this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_STATE + ", " + mode + ");");
        }
        this.mode = mode;
    }

    /**
     * Get the audio state.
     *
     * @return int
     */
    public int getState() {
        return this.state.ordinal();
    }

    /**
     * Set the volume for audio player
     *
     * @param volume            Volume level for both channels (0.0 to 1.0).
     */
    public void setVolume(float volume) {
        this.player.setVolume(volume, volume);
    }

    /**
     * attempts to put the player in play mode
     * @return true if in playmode, false otherwise
     */
    private boolean playMode() {
        switch(this.mode) {
        case NONE:
            this.setMode(MODE.PLAY);
            break;
        case PLAY:
            break;
        case RECORD:
            Log.d(LOG_TAG, "AudioPlayer Error: Can't play in record mode.");
            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_ERROR + ", { \"code\":" + MEDIA_ERR_ABORTED + "});");
            return false; //player is not ready
        }
        return true;
    }

    /**
     * attempts to initialize the media player for playback
     * @param file the file to play
     * @return false if player not ready, reports if in wrong mode or state
     */
    private boolean readyPlayer(String file) {
        if (playMode()) {
            switch (this.state) {
                case MEDIA_NONE:
                    if (this.player == null) {
                        this.player = new MediaPlayer();
                    }
                    try {
                        this.loadAudioFile(file);
                    } catch (Exception e) {
                        this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', "+MEDIA_ERROR+", { \"code\":"+MEDIA_ERR_ABORTED+"});");
                    }
                    // Loading is asynchronous; onPrepared() takes over.
                    return false;
                case MEDIA_LOADING:
                    //cordova js is not aware of MEDIA_LOADING, so we send MEDIA_STARTING instead
                    Log.d(LOG_TAG, "AudioPlayer Loading: startPlaying() called during media preparation: " + STATE.MEDIA_STARTING.ordinal());
                    this.prepareOnly = false;
                    return false;
                case MEDIA_STARTING:
                case MEDIA_RUNNING:
                case MEDIA_PAUSED:
                    return true;
                case MEDIA_STOPPED:
                    //if we are readying the same file
                    if (this.audioFile.compareTo(file) == 0) {
                        //reset the audio file
                        player.seekTo(0);
                        player.pause();
                        return true;
                    } else {
                        //reset the player
                        this.player.reset();
                        try {
                            this.loadAudioFile(file);
                        } catch (Exception e) {
                            this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_ERROR + ", { \"code\":" + MEDIA_ERR_ABORTED + "});");
                        }
                        //if we had to prepare the file, we won't be in the correct state for playback
                        return false;
                    }
                default:
                    Log.d(LOG_TAG, "AudioPlayer Error: startPlaying() called during invalid state: " + this.state);
                    this.handler.webView.sendJavascript("cordova.require('org.apache.cordova.media.Media').onStatus('" + this.id + "', " + MEDIA_ERROR + ", { \"code\":" + MEDIA_ERR_ABORTED + "});");
            }
        }
        return false;
    }

    /**
     * load audio file
     * Streaming sources are prepared asynchronously; local files (assets,
     * absolute paths, or paths relative to external storage) synchronously.
     * @throws IOException
     * @throws IllegalStateException
     * @throws SecurityException
     * @throws IllegalArgumentException
     */
    private void loadAudioFile(String file) throws IllegalArgumentException, SecurityException, IllegalStateException, IOException {
        if (this.isStreaming(file)) {
            this.player.setDataSource(file);
            this.player.setAudioStreamType(AudioManager.STREAM_MUSIC);
            //if it's a streaming file, play mode is implied
            this.setMode(MODE.PLAY);
            this.setState(STATE.MEDIA_STARTING);
            this.player.setOnPreparedListener(this);
            this.player.prepareAsync();
        }
        else {
            if (file.startsWith("/android_asset/")) {
                // Strip the "/android_asset/" prefix (15 chars) to get the
                // asset-relative path.
                String f = file.substring(15);
                android.content.res.AssetFileDescriptor fd = this.handler.cordova.getActivity().getAssets().openFd(f);
                this.player.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
            }
            else {
                File fp = new File(file);
                if (fp.exists()) {
                    FileInputStream fileInputStream = new FileInputStream(file);
                    this.player.setDataSource(fileInputStream.getFD());
                    fileInputStream.close();
                }
                else {
                    this.player.setDataSource(Environment.getExternalStorageDirectory().getPath() + "/" + file);
                }
            }
            this.setState(STATE.MEDIA_STARTING);
            this.player.setOnPreparedListener(this);
            this.player.prepare();

            // Get duration
            this.duration = getDurationInSeconds();
        }
    }
}
// NOTE(review): the matching header-guard #ifndef/#define for the trailing
// #endif opens above this chunk -- confirm against the preceding lines.
namespace boost { namespace spirit
{
    ///////////////////////////////////////////////////////////////////////////
    // Enablers
    ///////////////////////////////////////////////////////////////////////////
    template <>
    struct use_directive<karma::domain, tag::omit>  // enables omit
      : mpl::true_ {};

    template <>
    struct use_directive<karma::domain, tag::skip>  // enables skip
      : mpl::true_ {};

}}

namespace boost { namespace spirit { namespace karma
{
#ifndef BOOST_SPIRIT_NO_PREDEFINED_TERMINALS
    using spirit::omit;
    using spirit::skip;
#endif
    using spirit::omit_type;
    using spirit::skip_type;

    ///////////////////////////////////////////////////////////////////////////
    // omit_directive consumes the attribute of subject generator without
    // generating anything
    ///////////////////////////////////////////////////////////////////////////
    template <typename Subject, bool Execute>
    struct omit_directive : unary_generator<omit_directive<Subject, Execute> >
    {
        typedef Subject subject_type;

        // Combines the subject's generator properties with the "disabling"
        // property, since this directive suppresses all output.
        typedef mpl::int_<
            generator_properties::disabling | subject_type::properties::value
        > properties;

        omit_directive(Subject const& subject)
          : subject(subject) {}

        // Attribute type is taken unchanged from the subject generator.
        template <typename Context, typename Iterator = unused_type>
        struct attribute
          : traits::attribute_of<subject_type, Context, Iterator>
        {};

        template <typename OutputIterator, typename Context, typename Delimiter
          , typename Attribute>
        bool generate(OutputIterator& sink, Context& ctx, Delimiter const& d
          , Attribute const& attr) const
        {
            // We need to actually compile the output operation as we don't
            // have any other means to verify, whether the passed attribute is
            // compatible with the subject.

            // omit[] will execute the code, while skip[] doesn't execute it
            if (Execute) {
                // wrap the given output iterator to avoid output
                detail::disable_output<OutputIterator> disable(sink);
                return subject.generate(sink, ctx, d, attr);
            }
            return true;
        }

        template <typename Context>
        info what(Context& context) const
        {
            return info(Execute ? "omit" : "skip", subject.what(context));
        }

        Subject subject;
    };

    ///////////////////////////////////////////////////////////////////////////
    // Generator generators: make_xxx function (objects)
    ///////////////////////////////////////////////////////////////////////////
    // omit[] -> Execute = true (subject runs with output disabled)
    template <typename Subject, typename Modifiers>
    struct make_directive<tag::omit, Subject, Modifiers>
    {
        typedef omit_directive<Subject, true> result_type;

        result_type operator()(unused_type, Subject const& subject
          , unused_type) const
        {
            return result_type(subject);
        }
    };

    // skip[] -> Execute = false (subject is never invoked)
    template <typename Subject, typename Modifiers>
    struct make_directive<tag::skip, Subject, Modifiers>
    {
        typedef omit_directive<Subject, false> result_type;

        result_type operator()(unused_type, Subject const& subject
          , unused_type) const
        {
            return result_type(subject);
        }
    };

}}}

namespace boost { namespace spirit { namespace traits
{
    ///////////////////////////////////////////////////////////////////////////
    // Semantic-action and container-handling traits delegate to the subject.
    template <typename Subject, bool Execute>
    struct has_semantic_action<karma::omit_directive<Subject, Execute> >
      : unary_has_semantic_action<Subject> {};

    ///////////////////////////////////////////////////////////////////////////
    template <typename Subject, bool Execute, typename Attribute
      , typename Context, typename Iterator>
    struct handles_container<karma::omit_directive<Subject, Execute>, Attribute
      , Context, Iterator>
      : unary_handles_container<Subject, Attribute, Context, Iterator> {};
}}}

#endif
//
// This file is part of nuBASIC
// Copyright (c) Antonino Calderone (antonino.calderone@gmail.com)
// All rights reserved.
// Licensed under the MIT License.
// See COPYING file in the project root for full license information.
//


/* -------------------------------------------------------------------------- */

#ifndef __NU_INSTRBLOCK_METADATA_H__
#define __NU_INSTRBLOCK_METADATA_H__


/* -------------------------------------------------------------------------- */

#include "nu_flag_map.h"
#include "nu_prog_pointer.h"

#include <deque>
#include <list>
#include <map>
#include <memory>
#include <sstream>
#include <stack>
#include <set>


/* -------------------------------------------------------------------------- */

namespace nu {


/* -------------------------------------------------------------------------- */

// Instr block provides context for 'while' and 'if' statements
// with multi-lines syntax implementation

struct instrblock_t {
    // Shared-ownership handle used by the metadata tables below.
    using handle_t = std::shared_ptr<instrblock_t>;

    // Flag identifier registered in 'flag'; set when an EXIT statement
    // leaves this block.
    enum { EXIT };

    instrblock_t() noexcept { flag.define(EXIT); }

    instrblock_t(const instrblock_t&) = default;
    instrblock_t& operator=(const instrblock_t&) = default;

    // Dumps a human-readable description of the block into ss.
    void trace(std::stringstream& ss, bool include_else = false);

    std::string identifier;        // optional block label
    flag_map_t flag;               // per-block flags (see EXIT)
    prog_pointer_t pc_begin_stmt;  // program counter of the opening statement
    prog_pointer_t pc_else_stmt;   // used by IF...THEN...ELSE... stmt
    prog_pointer_t pc_end_stmt;    // program counter of the closing statement
};


/* -------------------------------------------------------------------------- */

// Compile-time tables mapping source lines to instruction blocks, built
// while scanning multi-line constructs.
struct instrblock_metadata_t {
    using begin_tbl_t
        = std::map<prog_pointer_t::line_number_t, instrblock_t::handle_t>;

    using end_tbl_t = std::map<prog_pointer_t::line_number_t,
        prog_pointer_t::line_number_t>;

    using build_stack_t = std::deque<prog_pointer_t::line_number_t>;

protected:
    begin_tbl_t begin_tbl; // line -> block
    end_tbl_t end_tbl; // end-line -> begin line
    build_stack_t build_stack; // build stack
    end_tbl_t exit_tbl; // exit-line -> begin line

public:
    bool empty() const noexcept { return begin_tbl.empty(); }

    size_t get_exit_point_cnt() const noexcept { return exit_tbl.size(); }

    // Dumps the tables into ss for debugging.
    void trace(std::stringstream& ss, bool include_else = false);

    // Resets all tables and the build stack.
    void clear();

    // Records the opening statement of a block at pc.
    void compile_begin(
        const prog_pointer_t& pc, const std::string& identifier = "");

    // Records the closing statement; returns the matching block handle.
    instrblock_t::handle_t compile_end(const prog_pointer_t& pc);

    // Records an EXIT statement inside the current block.
    instrblock_t::handle_t compile_exit_point(const prog_pointer_t& pc);

    // Lookup helpers: each returns the block handle keyed by the given
    // line / program counter (semantics of a miss are defined in the .cc —
    // NOTE(review): likely a null handle, confirm in the implementation).
    instrblock_t::handle_t begin_find(
        const prog_pointer_t::line_number_t& line) const;

    instrblock_t::handle_t begin_find(const prog_pointer_t& pc) const;

    instrblock_t::handle_t end_find(const prog_pointer_t& pc) const;

    instrblock_t::handle_t exit_find(const prog_pointer_t& pc) const;
};


/* -------------------------------------------------------------------------- */

// Metadata for a single IF...THEN...ELSE...END IF construct.
struct if_instrblock_t {
    prog_pointer_t pc_if_stmt;            // location of the IF statement
    prog_pointer_t pc_endif_stmt;         // location of the END IF statement
    std::list<prog_pointer_t> else_list;  // locations of ELSE/ELSE IF branches
    bool condition = false;               // last evaluated IF condition

    void clear();

    friend std::ostream& operator<<(
        std::ostream& os, const if_instrblock_t& ib);
};


/* -------------------------------------------------------------------------- */

// Locations of STOP statements encountered while compiling.
struct stop_instr_metadata_t {
    std::set<prog_pointer_t> pc_stop_stmt;

    friend std::ostream& operator<<(
        std::ostream& os, const stop_instr_metadata_t& ib);
};


/* -------------------------------------------------------------------------- */

// Aggregates all IF-block metadata for a program.
struct if_instrblock_metadata_t {
    std::stack<prog_pointer_t> pc_stack;                    // open IFs while building
    std::map<prog_pointer_t, if_instrblock_t> data;         // IF line -> block info
    std::map<prog_pointer_t, prog_pointer_t> block_to_if_line_tbl; // inner line -> IF line

    void clear();

    friend std::ostream& operator<<(
        std::ostream& os, const if_instrblock_metadata_t& md);
};


/* -------------------------------------------------------------------------- */

}


/* -------------------------------------------------------------------------- */

#endif // __NU_INSTRBLOCK_METADATA_H__
""" A module to produce performance plots about tracking approaches. >>> engines = [LinearInterpolationEngine(), ... DynamicProgrammingEngine(), ... ActiveLearnLinearEngine()] >>> cpfs = [.01, .02, .05, .1] >>> frames = filetovideo("/scratch/frames/") >>> data = load(["video.txt", "video2.txt"], frames) >>> data = build(data, cpfs, engines, multiprocessing.Pool(24)) >>> fig = plotperformance(data, lambda x, y: x.percentoverlap(y) > .5) >>> fig.show() """ from vision.track import interpolation from vision.track import dp from vision import annotations from vision import frameiterator, readpaths from vision.alearn import marginals from vision import visualize from math import ceil, floor import itertools import pylab from mpl_toolkits.mplot3d import Axes3D from matplotlib import cm, mpl import logging import os logger = logging.getLogger("vision.reporting") class Engine(object): """ An engine to predict given a fixed number of clicks. """ def __call__(self, video, groundtruths, cpfs, pool = None): """ Returns a dictionary of predicted tracks given the ground truth. Each key in the dictionary must be a click-per-frame as specified by the cpfs parameter and the value is a predicted path. """ raise NotImplementedError("__call__() must be implemented") def __str__(self): """ Returns the name of the engine, for reporting in the graph. """ name = self.__class__.__name__ if name[-6:] == "Engine": name = name[0:-6] return name def color(self): """ Returns the color used to represent this engine. """ raise NotImplementedError("color() must be implemented") class FixedRateEngine(Engine): """ An abstract engine that uses a fixed skip interval (e.g., linear interpolation). """ def __call__(self, video, gtruths, cpfs, pool = None): """ Computes the correct skip for a given cpf and builds the dictionary. Child classes should implement predict(). 
""" result = {} numframes = sum(x[-1].frame - x[0].frame for x in gtruths.values()) logger.info("Total of {0} frames".format(numframes)) for cpf in cpfs: clicks = int(cpf * numframes) usedclicks = 0 logger.info("CPF {0} has {1} clicks".format(cpf, clicks)) schedule = {} for id, gtruth in gtruths.items(): gtruth.sort(key = lambda x: x.frame) pathclicks = clicks pathclicks *= float(gtruth[-1].frame - gtruth[0].frame) pathclicks /= numframes pathclicks = int(floor(pathclicks)) schedule[id] = max(pathclicks, 1) usedclicks += schedule[id] for id, _ in zip(itertools.cycle(gtruths.keys()), range(clicks - usedclicks)): schedule[id] += 1 for id, clicksinschedule in schedule.items(): logger.info("ID {0} has {1} clicks for {2} frames".format(id, clicksinschedule, len(gtruths[id]))) for id, gtruth in gtruths.items(): skip = int(ceil(float(gtruth[-1].frame - gtruth[0].frame) / schedule[id])) given = gtruth[::skip] given = given[:schedule[id]] if id not in result: result[id] = {} logger.info("Processing {0} with {1} clicks".format(id, schedule[id])) result[id][cpf] = self.predict(video, given, gtruth[-1].frame, pool = pool) return result def predict(self, video, given, last, pool): """ Given a video and a sparse path, predict the missing annotations. """ raise NotImplementedError("predict() must be implemented") class LinearInterpolationEngine(FixedRateEngine): """ Uses linear interpolation to predict missing annotations as a fixed rate engine. """ def predict(self, video, given, last, pool): path = interpolation.LinearFill(given) while path[-1].frame <= last: path.append(annotations.Box(path[-1].xtl, path[-1].ytl, path[-1].xbr, path[-1].ybr, path[-1].frame + 1)) return path def color(self): return "b" class DynamicProgrammingEngine(FixedRateEngine): """ Uses a dynamic programming based tracker to predict the missing annotations. 
""" def __init__(self, pairwisecost = 0.001, upperthreshold = 10, skip = 3, rgbbin = 8, hogbin = 8): self.pairwisecost = pairwisecost self.upperthreshold = upperthreshold self.skip = skip self.rgbbin = rgbbin self.hogbin = hogbin def predict(self, video, given, last, pool): return dp.fill(given, video, last = last, pool = pool, pairwisecost = self.pairwisecost, upperthreshold = self.upperthreshold, skip = self.skip, rgbbin = self.rgbbin, hogbin = self.hogbin) def color(self): return "r" class ActiveLearnDPEngine(Engine): """ Uses an active learning approach to annotate the most informative frames. """ def __init__(self, pairwisecost = 0.001, upperthreshold = 10, sigma = .1, erroroverlap = 0.5, skip = 3, rgbbin = 8, hogbin = 8): self.pairwisecost = pairwisecost self.upperthreshold = upperthreshold self.sigma = sigma self.erroroverlap = erroroverlap self.skip = skip self.rgbbin = rgbbin self.hogbin = hogbin def __call__(self, video, gtruths, cpfs, pool = None): result = {} pathdict = {} for id, gtruth in gtruths.items(): gtruth.sort(key = lambda x: x.frame) pathdict[id] = dict((x.frame, x) for x in gtruth) requests = {} for id, gtruth in gtruths.items(): frame, score, predicted, _ = marginals.pick([gtruth[0]], video, last = gtruth[-1].frame, pool = pool, pairwisecost = self.pairwisecost, upperthreshold = self.upperthreshold, sigma = self.sigma, erroroverlap = self.erroroverlap, skip = self.skip, rgbbin = self.rgbbin, hogbin = self.hogbin) requests[id] = (score, frame, predicted, [gtruth[0]]) result[id] = {} usedclicks = len(gtruths) logger.info("Used {0} clicks!".format(usedclicks)) numframes = sum(x[-1].frame - x[0].frame for x in gtruths.values()) reqclicks = [(int(numframes * x), x) for x in cpfs] reqclicks.sort() for clicks, cpf in reqclicks: for _ in range(clicks - usedclicks): id = max((y[0], x) for x, y in requests.items())[1] givens = list(requests[id][3]) givens.append(pathdict[id][requests[id][1]]) givens.sort(key = lambda x: x.frame) frame, score, 
predicted, _ = marginals.pick(givens, video, last = max(pathdict[id]), pool = pool, pairwisecost = self.pairwisecost, upperthreshold = self.upperthreshold, sigma = self.sigma, erroroverlap = self.erroroverlap, skip = self.skip, rgbbin = self.rgbbin, hogbin = self.hogbin) requests[id] = (score, frame, predicted, givens) usedclicks += 1 logger.info("Used {0} clicks with {1} total in this cpf!" .format(usedclicks, clicks)) for id, (_, _, path, _) in requests.iteritems(): result[id][cpf] = path return result def color(self): return "g" class PercentOverlap(object): def __init__(self, threshold): self.threshold = threshold def __call__(self, x, y): if x.frame != y.frame: raise RuntimeError("Frames do not match") if x.percentoverlap(y) >= self.threshold: return 0 else: return 1 def __str__(self): return "Percent Overlap >= {0}".format(self.threshold) class Intersection(object): def __call__(self, x, y): if x.frame != y.frame: raise RuntimeError("Frames do not match") if x.intersects(y): return 0 else: return 1 def __str__(self): return "Intersection" def filetovideo(base): def process(filename): name = os.path.splitext(os.path.basename(filename))[0] return frameiterator(base + "/" + name) return process def load(data, frames, onlylabels = None, breakup = True, limit = None, toframe = None): """ Produces a list over tracks found in the files for data. frames should be a callable that returns a frame iterator. 
""" result = [] numpaths = 0 for file in data: video = frames(file) paths = [] for label, path in readpaths(open(file)): if not onlylabels or label in onlylabels: if breakup: path.sort(key = lambda x: x.frame) currentpath = [] for box in path: if box.lost: if len(currentpath) > 1: paths.append((label, currentpath)) currentpath = [] numpaths += 1 else: currentpath.append(box) if len(currentpath) > 1: paths.append((label, currentpath)) numpaths += 1 elif len(currentpath) > 1: paths.append((label, path)) numpaths += 1 result.append((video, paths)) # cut after a certain frame if toframe: cutresult = [] for video, paths in result: pathsresult = [] for label, path in paths: filtered = [x for x in path if x.frame <= toframe] if filtered: pathsresult.append((label, filtered)) if pathsresult: cutresult.append((video, pathsresult)) result = cutresult # limit the number of videos if limit: limitresult = [] counter = 0 for video, paths in result: pathsresult = [] for label, path in paths: counter += 1 if counter <= limit: pathsresult.append((label, path)) if pathsresult: limitresult.append((video, pathsresult)) result = limitresult return result def merge(datas): merged = {} strmapping = {} for data in datas: for engine, predictions in data.iteritems(): if str(engine) not in strmapping: strmapping[str(engine)] = engine merged[engine] = [] print strmapping merged[strmapping[str(engine)]].extend(predictions) return merged def build(data, cpfs, engines, pool = None): """ Takes the data and runs the engines. 
""" result = {} for engine in engines: logger.info("Computing tracks with {0}".format(str(engine))) result[engine] = [] for video, paths in data: for path in paths: path[1].sort(key = lambda x: x.frame) paths = [x[1] for x in paths] keys = range(len(paths)) paths = dict(zip(keys, paths)) predictions = engine(video, paths, cpfs, pool) result[engine].extend((predictions[x], paths[x], video) for x in keys) return result def scoreperformance(data, scorer): """ Plots a performance curve for the data with the specified engines. """ results = {} for engine, predictions in data.iteritems(): logger.info("Plotting and scoring tracks for {0}".format(str(engine))) scores = {} lengths = {} for prediction, groundtruth, video in predictions: for cpf, path in prediction.iteritems(): if cpf not in scores: scores[cpf] = 0 lengths[cpf] = 0 try: score = sum(scorer(x,y) for x, y in zip(path, groundtruth)) except Exception as e: logger.exception(e) else: scores[cpf] += score lengths[cpf] += len(path) # normalize scores for cpf in scores: scores[cpf] = scores[cpf] / float(lengths[cpf]) results[engine] = zip(*sorted(scores.items())) return results def plotperformance(data, scorer, only = []): fig = pylab.figure() for engine, (x, y) in scoreperformance(data, scorer).items(): if only and str(engine) not in only: continue pylab.plot(x, y, "{0}.-".format(engine.color()), label = str(engine), linewidth = 4) pylab.ylabel("Average error per frame ({0})".format(str(scorer))) pylab.xlabel("Average clicks per frame per object") pylab.legend() pylab.show() def plotcorrect(data, scorer, threshold = 0): fig = pylab.figure() for engine, predictions in data.iteritems(): counter = {} for prediction, groundtruth, video in predictions: for cpf, path in prediction.iteritems(): if cpf not in counter: counter[cpf] = 0 try: score = sum(scorer(x, y) for x, y in zip(path, groundtruth)) except Exception as e: logger.exception(e) else: if score <= threshold: counter[cpf] += 1 x, y = zip(*sorted(counter.items())) 
pylab.plot(x, y, "{0}.-".format(engine.color()), label = str(engine), linewidth = 4) pylab.ylabel("Number of completely correct tracks ({0}, threshold = {1})".format(str(scorer), threshold)) pylab.xlabel("Average clicks per frame per object") pylab.legend() pylab.show() def plotsurface(input, scorer, left, right, cpucostfact = 20, humancostfact = 2500 / 5): data = scoreperformance(input, scorer) # find left and right for potential in data.keys(): if str(potential) == left: left = potential elif str(potential) == right: right = potential cpucost = numpy.arange(0, 1.05, 0.05) humcost = numpy.asarray(sorted(data[left][0])) error = numpy.zeros((humcost.shape[0], cpucost.shape[0])) cpucost, humcost = numpy.meshgrid(cpucost, humcost) fig = plt.figure() ax = Axes3D(fig) ax.set_xlabel("CPU Cost") ax.set_ylabel("Human Cost") ax.set_zlabel("Error") ax.plot_surface(cpucost, humancost, error) ax.legend() plt.show() def visualizepaths(data, dir): for engine, predictions in data.iteritems(): for id, (prediction, groundtruth, video) in enumerate(predictions): for cpf, path in prediction.iteritems(): logger.info("Visualizing engine {0} path {1} cpf {2}" .format(str(engine), id, cpf)) filepath = "{0}/{1}/{2}/{3}".format(dir, str(engine), id, cpf) try: os.makedirs(filepath) except OSError: pass vis = visualize.highlight_paths(video, [path, groundtruth]) visualize.save(vis, lambda x: "{0}/{1}.jpg".format(filepath, x))
module('Keyboard Navigation (All)', { setup: function(){ this.input = $('<input type="text">') .appendTo('#qunit-fixture') .datepicker({format: "dd-mm-yyyy"}) .focus(); // Activate for visibility checks this.dp = this.input.data('datepicker') this.picker = this.dp.picker; }, teardown: function(){ this.picker.remove(); } }); test('TAB hides picker', function(){ var target; ok(this.picker.is(':visible'), 'Picker is visible'); this.input.trigger({ type: 'keydown', keyCode: 9 }); ok(this.picker.is(':not(:visible)'), 'Picker is hidden'); });
set -e trap "cleanup" 0 1 2 3 9 11 13 15 # ME=export.sh ME=$(basename ${0}) SRC=$(dirname $(dirname $(readlink -f $0))) echo Source directory=${SRC} usage() { echo echo "Usage: ${ME} [DIR] [TAG]" exit 1 } cleanup() { trap - 0 1 2 3 9 11 13 15 echo [ ${WORKDIR} ] && [ -d ${WORKDIR} ] && rm -rf ${WORKDIR} } DIR=$PWD # This will get the latest created tag TAG=$(git describe --tags --always) echo Using tag ${TAG} to create archive ## ## Allow overrides to be passed on the cmdline ## if [ $# -gt 2 ]; then usage elif [ $# -ge 1 ]; then DIR=$1 if [ $# -eq 2 ]; then TAG=$2 fi fi # verify the tag exists git rev-list -1 tags/${TAG} -- >/dev/null || usage # mktemp command creates a temp directory for example - /tmp/tmp.k8vDddIzni WORKDIR=$(mktemp -d) echo Working Directory=${WORKDIR} ## ## Create the archive ## ( cd ${SRC} MTIME=$(date -d @`git log -1 --pretty=format:%ct tags/${TAG}` '+%Y-%m-%d %H:%M:%S') VERSION=$(git show tags/${TAG}:VERSION.txt) ARCHIVE=$DIR/qpid-dispatch-${VERSION}.tar.gz PREFIX=qpid-dispatch-${VERSION} [ -d ${WORKDIR} ] || mkdir -p ${WORKDIR} git archive --format=tar --prefix=${PREFIX}/ tags/${TAG} \ | tar -x -C ${WORKDIR} cd ${WORKDIR} tar -c -z \ --owner=root --group=root --numeric-owner \ --mtime="${MTIME}" \ -f ${ARCHIVE} ${PREFIX} echo Created "${ARCHIVE}" echo Success!!! )
#include "tensorflow/java/src/main/native/server_jni.h" #include "tensorflow/c/c_api.h" #include "tensorflow/java/src/main/native/exception_jni.h" #include "tensorflow/java/src/main/native/utils_jni.h" namespace { TF_Server* requireHandle(JNIEnv* env, jlong handle) { static_assert(sizeof(jlong) >= sizeof(TF_Server*), "Cannot package C object pointers as a Java long"); if (handle == 0) { throwException(env, kIllegalStateException, "close() has been called on the Server"); return nullptr; } return reinterpret_cast<TF_Server*>(handle); } } // namespace JNIEXPORT jlong JNICALL Java_org_tensorflow_Server_allocate( JNIEnv* env, jclass clazz, jbyteArray server_def) { TF_Status* status = TF_NewStatus(); jbyte* server_def_ptr = env->GetByteArrayElements(server_def, nullptr); TF_Server* server = TF_NewServer( server_def_ptr, static_cast<size_t>(env->GetArrayLength(server_def)), status); env->ReleaseByteArrayElements(server_def, server_def_ptr, JNI_ABORT); bool ok = throwExceptionIfNotOK(env, status); TF_DeleteStatus(status); return ok ? 
reinterpret_cast<jlong>(server) : 0; } JNIEXPORT void JNICALL Java_org_tensorflow_Server_start(JNIEnv* env, jclass clazz, jlong handle) { TF_Server* server = requireHandle(env, handle); if (server == nullptr) return; TF_Status* status = TF_NewStatus(); TF_ServerStart(server, status); throwExceptionIfNotOK(env, status); TF_DeleteStatus(status); } JNIEXPORT void JNICALL Java_org_tensorflow_Server_stop(JNIEnv* env, jclass clazz, jlong handle) { TF_Server* server = requireHandle(env, handle); if (server == nullptr) return; TF_Status* status = TF_NewStatus(); TF_ServerStop(server, status); throwExceptionIfNotOK(env, status); TF_DeleteStatus(status); } JNIEXPORT void JNICALL Java_org_tensorflow_Server_join(JNIEnv* env, jclass clazz, jlong handle) { TF_Server* server = requireHandle(env, handle); if (server == nullptr) return; TF_Status* status = TF_NewStatus(); TF_ServerJoin(server, status); throwExceptionIfNotOK(env, status); TF_DeleteStatus(status); } JNIEXPORT void JNICALL Java_org_tensorflow_Server_delete(JNIEnv* env, jclass clazz, jlong handle) { TF_Server* server = requireHandle(env, handle); if (server == nullptr) return; TF_DeleteServer(server); }
package org.wso2.carbon.identity.application.authenticator.oidc; import net.minidev.json.JSONArray; import net.minidev.json.JSONValue; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.oltu.oauth2.client.OAuthClient; import org.apache.oltu.oauth2.client.URLConnectionClient; import org.apache.oltu.oauth2.client.request.OAuthClientRequest; import org.apache.oltu.oauth2.client.response.OAuthAuthzResponse; import org.apache.oltu.oauth2.client.response.OAuthClientResponse; import org.apache.oltu.oauth2.common.exception.OAuthProblemException; import org.apache.oltu.oauth2.common.exception.OAuthSystemException; import org.apache.oltu.oauth2.common.message.types.GrantType; import org.apache.oltu.oauth2.common.utils.JSONUtils; import org.apache.commons.codec.binary.Base64; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.identity.application.authentication.framework.AbstractApplicationAuthenticator; import org.wso2.carbon.identity.application.authentication.framework.FederatedApplicationAuthenticator; import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext; import org.wso2.carbon.identity.application.authentication.framework.exception.AuthenticationFailedException; import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser; import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants; import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils; import org.wso2.carbon.identity.application.authenticator.oidc.internal.OpenIDConnectAuthenticatorServiceComponent; import org.wso2.carbon.identity.application.common.model.ClaimMapping; import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants; import 
org.wso2.carbon.identity.core.util.IdentityCoreConstants; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.user.api.UserRealm; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.user.core.UserStoreManager; import org.wso2.carbon.user.core.util.UserCoreUtil; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.util.HashMap; import java.util.Iterator; import java.util.Map; public class OpenIDConnectAuthenticator extends AbstractApplicationAuthenticator implements FederatedApplicationAuthenticator { private static final long serialVersionUID = -4154255583070524018L; private static Log log = LogFactory.getLog(OpenIDConnectAuthenticator.class); @Override public boolean canHandle(HttpServletRequest request) { if (log.isTraceEnabled()) { log.trace("Inside OpenIDConnectAuthenticator.canHandle()"); } // Check commonauth got an OIDC response if (request.getParameter(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE) != null && request.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE) != null && OIDCAuthenticatorConstants.LOGIN_TYPE.equals(getLoginType(request))) { return true; } else if (request.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE) != null && request.getParameter(OIDCAuthenticatorConstants.OAUTH2_ERROR) != null) { //if sends error like access_denied return true; } // TODO : What if IdP failed? 
return false; } /** * @return */ protected String getAuthorizationServerEndpoint(Map<String, String> authenticatorProperties) { return null; } /** * @return */ protected String getCallbackUrl(Map<String, String> authenticatorProperties) { return authenticatorProperties.get(IdentityApplicationConstants.OAuth2.CALLBACK_URL); } /** * @return */ protected String getTokenEndpoint(Map<String, String> authenticatorProperties) { return null; } /** * @param state * @return */ protected String getState(String state, Map<String, String> authenticatorProperties) { return state; } /** * @return */ protected String getScope(String scope, Map<String, String> authenticatorProperties) { return scope; } /** * @return */ protected boolean requiredIDToken(Map<String, String> authenticatorProperties) { return true; } /** * * @param context * @param jsonObject * @param token * @return */ protected String getAuthenticateUser(AuthenticationContext context, Map<String, Object> jsonObject,OAuthClientResponse token) { return (String) jsonObject.get("sub"); } protected String getCallBackURL(Map<String, String> authenticatorProperties) { return getCallbackUrl(authenticatorProperties); } protected String getQueryString(Map<String, String> authenticatorProperties) { return authenticatorProperties.get(FrameworkConstants.QUERY_PARAMS); } /** * Get user info endpoint. * @param token OAuthClientResponse * @param authenticatorProperties Map<String, String> * @return User info endpoint. */ protected String getUserInfoEndpoint(OAuthClientResponse token, Map<String, String> authenticatorProperties) { return authenticatorProperties.get(IdentityApplicationConstants.Authenticator.OIDC.USER_INFO_URL); } /** * Get subject attributes. * @param token OAuthClientResponse * @param authenticatorProperties Map<String, String> * @return Map<ClaimMapping, String> Claim mappings. 
*/ protected Map<ClaimMapping, String> getSubjectAttributes(OAuthClientResponse token, Map<String, String> authenticatorProperties) { Map<ClaimMapping, String> claims = new HashMap<ClaimMapping, String>(); try { String accessToken = token.getParam(OIDCAuthenticatorConstants.ACCESS_TOKEN); String url = getUserInfoEndpoint(token, authenticatorProperties); String json = sendRequest(url, accessToken); if (!StringUtils.isNotBlank(json)) { log.info("Unable to fetch user claims. Proceeding without user claims"); return claims; } Map<String, Object> jsonObject = JSONUtils.parseJSON(json); // Extract the inside profile JSON object. Map<String, Object> profile = JSONUtils.parseJSON( jsonObject.entrySet().iterator().next().getValue().toString()); if (profile == null) { log.info("Invalid user profile object. Proceeding without user claims"); return claims; } for (Map.Entry<String, Object> data : profile.entrySet()) { String key = data.getKey(); claims.put(ClaimMapping.build(key, key, null, false), profile.get(key).toString()); if (log.isDebugEnabled()) { log.debug("Adding claims from end-point data mapping : " + key + " - " + profile.get(key).toString()); } } } catch (Exception e) { log.error("Error occurred while accessing user info endpoint", e); } return claims; } @Override protected void initiateAuthenticationRequest(HttpServletRequest request, HttpServletResponse response, AuthenticationContext context) throws AuthenticationFailedException { try { Map<String, String> authenticatorProperties = context.getAuthenticatorProperties(); if (authenticatorProperties != null) { String clientId = authenticatorProperties.get(OIDCAuthenticatorConstants.CLIENT_ID); String authorizationEP = getAuthorizationServerEndpoint(authenticatorProperties); if (authorizationEP == null) { authorizationEP = authenticatorProperties .get(OIDCAuthenticatorConstants.OAUTH2_AUTHZ_URL); } String callbackurl = getCallbackUrl(authenticatorProperties); if (callbackurl == null) { callbackurl = 
IdentityUtil.getServerURL(FrameworkConstants.COMMONAUTH, true); } String state = context.getContextIdentifier() + "," + OIDCAuthenticatorConstants.LOGIN_TYPE; state = getState(state, authenticatorProperties); OAuthClientRequest authzRequest; String queryString = getQueryString(authenticatorProperties); Map<String, String> paramValueMap = new HashMap<String, String>(); if (queryString != null) { String[] params = queryString.split("&"); if (params != null && params.length > 0) { for (String param : params) { String[] intParam = param.split("="); paramValueMap.put(intParam[0], intParam[1]); } context.setProperty("oidc:param.map", paramValueMap); } } String scope = paramValueMap.get("scope"); if (scope == null) { scope = OIDCAuthenticatorConstants.OAUTH_OIDC_SCOPE; } scope = getScope(scope, authenticatorProperties); if (queryString != null && queryString.toLowerCase().contains("scope=") && queryString.toLowerCase().contains("redirect_uri=")) { authzRequest = OAuthClientRequest.authorizationLocation(authorizationEP) .setClientId(clientId) .setResponseType(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE) .setState(state).buildQueryMessage(); } else if (queryString != null && queryString.toLowerCase().contains("scope=")) { authzRequest = OAuthClientRequest.authorizationLocation(authorizationEP) .setClientId(clientId).setRedirectURI(callbackurl) .setResponseType(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE) .setState(state).buildQueryMessage(); } else if (queryString != null && queryString.toLowerCase().contains("redirect_uri=")) { authzRequest = OAuthClientRequest.authorizationLocation(authorizationEP) .setClientId(clientId) .setResponseType(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE) .setScope(OIDCAuthenticatorConstants.OAUTH_OIDC_SCOPE).setState(state) .buildQueryMessage(); } else { authzRequest = OAuthClientRequest.authorizationLocation(authorizationEP) .setClientId(clientId).setRedirectURI(callbackurl) 
.setResponseType(OIDCAuthenticatorConstants.OAUTH2_GRANT_TYPE_CODE)
                        .setScope(scope).setState(state).buildQueryMessage();
            }
            // Redirect the user-agent to the IdP's authorization endpoint. The optional
            // 'domain' request parameter is forwarded as the 'fidp' (federated IdP hint)
            // query parameter, and any extra configured query string is appended.
            String loginPage = authzRequest.getLocationUri();
            String domain = request.getParameter("domain");
            if (domain != null) {
                loginPage = loginPage + "&fidp=" + domain;
            }
            if (queryString != null) {
                if (!queryString.startsWith("&")) {
                    loginPage = loginPage + "&" + queryString;
                } else {
                    loginPage = loginPage + queryString;
                }
            }
            response.sendRedirect(loginPage);
        } else {
            // Authenticator configuration is mandatory for building the redirect.
            if (log.isDebugEnabled()) {
                log.debug("Error while retrieving properties. Authenticator Properties cannot be null");
            }
            throw new AuthenticationFailedException(
                    "Error while retrieving properties. Authenticator Properties cannot be null");
        }
        } catch (IOException e) {
            log.error("Exception while sending to the login page", e);
            throw new AuthenticationFailedException(e.getMessage(), e);
        } catch (OAuthSystemException e) {
            log.error("Exception while building authorization code request", e);
            throw new AuthenticationFailedException(e.getMessage(), e);
        }
        return;
    }

    /**
     * Completes the OIDC authorization-code flow after the IdP redirects back.
     * <p>
     * Steps performed:
     * <ol>
     *   <li>Reads client id/secret, token endpoint and callback URL from the
     *       authenticator properties (with fallbacks to the raw OAuth2 token URL
     *       property and the server's commonauth URL, or a "redirect_uri" stashed
     *       in the context property "oidc:param.map").</li>
     *   <li>Exchanges the authorization code for tokens at the token endpoint.</li>
     *   <li>If an id_token is present, base64-decodes its payload (second JWT
     *       segment) into a JSON map, resolves the authenticated subject (either
     *       from a configured subject claim URI or via getAuthenticateUser), looks
     *       up the tenant's multi-attribute separator from the user store, builds
     *       claim mappings, and sets the federated subject on the context.</li>
     *   <li>If no id_token is present (and one is not required), the subject and
     *       attributes are resolved from the token response / user-info data.</li>
     * </ol>
     *
     * @param request  current HTTP request carrying the authorization response.
     * @param response current HTTP response (unused here).
     * @param context  authentication context; receives ACCESS_TOKEN / ID_TOKEN
     *                 properties and the authenticated subject.
     * @throws AuthenticationFailedException on any token exchange, subject
     *                                       resolution or user-store failure.
     */
    @Override
    protected void processAuthenticationResponse(HttpServletRequest request, HttpServletResponse response,
                                                 AuthenticationContext context) throws AuthenticationFailedException {
        try {
            Map<String, String> authenticatorProperties = context.getAuthenticatorProperties();
            String clientId = authenticatorProperties.get(OIDCAuthenticatorConstants.CLIENT_ID);
            String clientSecret = authenticatorProperties
                    .get(OIDCAuthenticatorConstants.CLIENT_SECRET);
            String tokenEndPoint = getTokenEndpoint(authenticatorProperties);
            if (tokenEndPoint == null) {
                // Fall back to the raw OAuth2 token URL property when the subclass
                // hook does not supply an endpoint.
                tokenEndPoint = authenticatorProperties
                        .get(OIDCAuthenticatorConstants.OAUTH2_TOKEN_URL);
            }
            String callbackurl = getCallbackUrl(authenticatorProperties);
            if (callbackurl == null) {
                callbackurl = IdentityUtil.getServerURL(FrameworkConstants.COMMONAUTH, true);
            }
            // A redirect_uri captured during the request phase (if any) overrides
            // the configured callback so the token request matches the authz request.
            @SuppressWarnings({"unchecked"})
            Map<String, String> paramValueMap = (Map<String, String>) context
                    .getProperty("oidc:param.map");
            if (paramValueMap != null && paramValueMap.containsKey("redirect_uri")) {
                callbackurl = paramValueMap.get("redirect_uri");
            }
            OAuthAuthzResponse authzResponse = OAuthAuthzResponse.oauthCodeAuthzResponse(request);
            String code = authzResponse.getCode();
            OAuthClientRequest accessRequest = null;
            accessRequest = getaccessRequest(tokenEndPoint, clientId, code, clientSecret, callbackurl);

            // create OAuth client that uses custom http client under the hood
            OAuthClient oAuthClient = new OAuthClient(new URLConnectionClient());
            OAuthClientResponse oAuthResponse = null;
            oAuthResponse = getOauthResponse(oAuthClient, accessRequest);
            // NOTE(review): getOauthResponse can return null when the token endpoint
            // answers with an OAuthProblemException (it is only debug-logged there),
            // which would NPE on the getParam calls below — confirm intended handling.

            // TODO : return access token and id token to framework
            String accessToken = oAuthResponse.getParam(OIDCAuthenticatorConstants.ACCESS_TOKEN);
            String idToken = oAuthResponse.getParam(OIDCAuthenticatorConstants.ID_TOKEN);
            if (log.isDebugEnabled()) {
                log.debug("Retrieved the Access Token:" + accessToken + " Id Token:" + idToken);
            }
            if (accessToken != null
                    && (idToken != null || !requiredIDToken(authenticatorProperties))) {
                context.setProperty(OIDCAuthenticatorConstants.ACCESS_TOKEN, accessToken);
                Map<String, Object> jsonObject = new HashMap<>();
                if (idToken != null) {
                    context.setProperty(OIDCAuthenticatorConstants.ID_TOKEN, idToken);
                    // Decode the JWT payload (segment 1) without signature validation;
                    // the claims are taken directly from the decoded JSON body.
                    String base64Body = idToken.split("\\.")[1];
                    byte[] decoded = Base64.decodeBase64(base64Body.getBytes());
                    String json = new String(decoded);
                    jsonObject = JSONUtils.parseJSON(json);
                    if (log.isDebugEnabled()) {
                        log.debug("Retrieved the User Information:" + jsonObject);
                    }
                    if (jsonObject != null) {
                        Map<ClaimMapping, String> claims = new HashMap<ClaimMapping, String>();
                        String authenticatedUser = null;
                        // Optionally resolve the subject from a configured claim URI
                        // instead of the id_token's sub attribute.
                        String isSubjectInClaimsProp = context.getAuthenticatorProperties().get(
                                IdentityApplicationConstants.Authenticator.SAML2SSO.IS_USER_ID_IN_CLAIMS);
                        if ("true".equalsIgnoreCase(isSubjectInClaimsProp)) {
                            authenticatedUser = getSubjectFromUserIDClaimURI(context);
                            if (authenticatedUser == null) {
                                if (log.isDebugEnabled()) {
                                    log.debug("Subject claim could not be found amongst subject attributes. "
                                            + "Defaulting to sub attribute in IDToken.");
                                }
                            }
                        }
                        if (authenticatedUser == null) {
                            authenticatedUser = getAuthenticateUser(context,jsonObject,oAuthResponse);
                        }
                        if (authenticatedUser == null) {
                            throw new AuthenticationFailedException("Cannot find federated User Identifier");
                        }
                        // Look up the tenant-specific multi-attribute separator used when
                        // flattening multi-valued claims into a single string.
                        String tenantDomain = MultitenantUtils.getTenantDomain(authenticatedUser);
                        String domainName = UserCoreUtil.extractDomainFromName(authenticatedUser);
                        UserStoreManager userStore;
                        String attributeSeparator = null;
                        try {
                            int tenantId = OpenIDConnectAuthenticatorServiceComponent.getRealmService()
                                    .getTenantManager().getTenantId(tenantDomain);
                            UserRealm userRealm = OpenIDConnectAuthenticatorServiceComponent.getRealmService()
                                    .getTenantUserRealm(tenantId);
                            userStore = (UserStoreManager) userRealm.getUserStoreManager();
                            attributeSeparator = userStore.getSecondaryUserStoreManager(domainName)
                                    .getRealmConfiguration()
                                    .getUserStoreProperty(IdentityCoreConstants.MULTI_ATTRIBUTE_SEPARATOR);
                        } catch (UserStoreException e) {
                            throw new AuthenticationFailedException("Error while retrieving multi attribute "
                                    + "separator", e);
                        }
                        for (Map.Entry<String, Object> entry : jsonObject.entrySet()) {
                            buildClaimMappings(claims, entry, attributeSeparator);
                        }
                        AuthenticatedUser authenticatedUserObj = AuthenticatedUser
                                .createFederateAuthenticatedUserFromSubjectIdentifier(authenticatedUser);
                        authenticatedUserObj.setUserAttributes(claims);
                        context.setSubject(authenticatedUserObj);
                    } else {
                        if (log.isDebugEnabled()) {
                            log.debug("Decoded json object is null");
                        }
                        throw new AuthenticationFailedException("Decoded json object is null");
                    }
                } else {
                    // No id_token (and not required): fall back to subject/attributes
                    // derived from the token response and user-info endpoint.
                    if (log.isDebugEnabled()) {
                        log.debug("The IdToken is null");
                    }
                    AuthenticatedUser authenticatedUserObj = AuthenticatedUser
                            .createFederateAuthenticatedUserFromSubjectIdentifier(getAuthenticateUser(context,
                                    jsonObject, oAuthResponse));
                    authenticatedUserObj.setUserAttributes(getSubjectAttributes(oAuthResponse,
                            authenticatorProperties));
                    context.setSubject(authenticatedUserObj);
                }
            } else {
                throw new AuthenticationFailedException("Authentication Failed");
            }
        } catch (OAuthProblemException e) {
            log.error(e.getMessage(), e);
            throw new AuthenticationFailedException(e.getMessage(), e);
        }
    }

    /**
     * Converts one decoded-claim entry into a {@link ClaimMapping} and adds it to
     * {@code claims}.
     * <p>
     * If the entry's value parses as a JSON array, its elements are joined with
     * {@code separator} (defaulting to the framework's multi-attribute separator
     * when blank); on any parse failure the raw string value is used as-is. The
     * claim is mapped onto itself (local URI == remote URI).
     *
     * @param claims    target map receiving the built mapping.
     * @param entry     claim name/value pair from the decoded id_token JSON.
     * @param separator separator for multi-valued claims; may be blank.
     */
    protected void buildClaimMappings(Map<ClaimMapping, String> claims, Map.Entry<String, Object> entry,
                                      String separator) {
        String claimValue = null;
        if (StringUtils.isBlank(separator)) {
            separator = IdentityCoreConstants.MULTI_ATTRIBUTE_SEPARATOR_DEFAULT;
        }
        try {
            JSONArray jsonArray = (JSONArray) JSONValue.parseWithException(entry.getValue().toString());
            if (jsonArray != null && jsonArray.size() > 0) {
                Iterator attributeIterator = jsonArray.iterator();
                while (attributeIterator.hasNext()) {
                    if (claimValue == null) {
                        claimValue = attributeIterator.next().toString();
                    } else {
                        claimValue = claimValue + separator + attributeIterator.next().toString();
                    }
                }
            }
        } catch (Exception e) {
            // Not a JSON array (or unparsable): treat the value as a plain string.
            claimValue = entry.getValue().toString();
        }
        claims.put(ClaimMapping.build(entry.getKey(), entry.getKey(), null, false), claimValue);
        if (log.isDebugEnabled()) {
            log.debug("Adding claim mapping : " + entry.getKey() + " <> " + entry.getKey() + " : " + claimValue);
        }
    }

    /**
     * Builds the authorization_code grant token request sent to the IdP's token
     * endpoint.
     *
     * @param tokenEndPoint token endpoint URL.
     * @param clientId      OAuth2 client identifier.
     * @param code          authorization code returned by the IdP.
     * @param clientSecret  OAuth2 client secret.
     * @param callbackurl   redirect URI used in the authorization request.
     * @return the built body-encoded token request.
     * @throws AuthenticationFailedException if the request cannot be built.
     */
    private OAuthClientRequest getaccessRequest(String tokenEndPoint, String clientId, String code,
                                                String clientSecret, String callbackurl)
            throws AuthenticationFailedException {
        OAuthClientRequest accessRequest = null;
        try {
            accessRequest = OAuthClientRequest.tokenLocation(tokenEndPoint)
                    .setGrantType(GrantType.AUTHORIZATION_CODE).setClientId(clientId)
                    .setClientSecret(clientSecret).setRedirectURI(callbackurl).setCode(code)
                    .buildBodyMessage();
        } catch (OAuthSystemException e) {
            if (log.isDebugEnabled()) {
                log.debug("Exception while building request for request access token", e);
            }
            throw new AuthenticationFailedException(e.getMessage(), e);
        }
        return accessRequest;
    }

    /**
     * Executes the token request against the IdP.
     * <p>
     * NOTE(review): an {@link OAuthProblemException} (an OAuth-level error reply)
     * is only debug-logged here, so this method returns {@code null} in that case;
     * the caller dereferences the result without a null check — confirm whether
     * this should rethrow instead.
     *
     * @param oAuthClient   client used to perform the HTTP call.
     * @param accessRequest token request built by {@code getaccessRequest}.
     * @return the token response, or {@code null} on an OAuth problem reply.
     * @throws AuthenticationFailedException on transport/system-level failures.
     */
    private OAuthClientResponse getOauthResponse(OAuthClient oAuthClient, OAuthClientRequest accessRequest)
            throws AuthenticationFailedException {
        OAuthClientResponse oAuthResponse = null;
        try {
            oAuthResponse = oAuthClient.accessToken(accessRequest);
        } catch (OAuthSystemException e) {
            if (log.isDebugEnabled()) {
                log.debug("Exception while requesting access token", e);
            }
            throw new AuthenticationFailedException(e.getMessage(), e);
        } catch (OAuthProblemException e) {
            if (log.isDebugEnabled()) {
                log.debug("Exception while requesting access token", e);
            }
        }
        return oAuthResponse;
    }

    /**
     * Extracts the framework context identifier from the OAuth2 'state' parameter.
     * The state is encoded as "&lt;contextIdentifier&gt;,&lt;loginType&gt;" (see
     * {@code getLoginType}, which reads the second component).
     *
     * @param request current HTTP request.
     * @return the context identifier, or {@code null} when no state is present.
     */
    @Override
    public String getContextIdentifier(HttpServletRequest request) {
        if (log.isTraceEnabled()) {
            log.trace("Inside OpenIDConnectAuthenticator.getContextIdentifier()");
        }
        String state = request.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE);
        if (state != null) {
            return state.split(",")[0];
        } else {
            return null;
        }
    }

    /**
     * Reads the login type — the second comma-separated component of the OAuth2
     * 'state' parameter.
     *
     * @param request current HTTP request.
     * @return the login type, or {@code null} when no state is present.
     */
    private String getLoginType(HttpServletRequest request) {
        String state = request.getParameter(OIDCAuthenticatorConstants.OAUTH2_PARAM_STATE);
        if (state != null) {
            return state.split(",")[1];
        } else {
            return null;
        }
    }

    /** @return the UI-friendly authenticator name. */
    @Override
    public String getFriendlyName() {
        return "openidconnect";
    }

    /** @return the internal authenticator name constant. */
    @Override
    public String getName() {
        return OIDCAuthenticatorConstants.AUTHENTICATOR_NAME;
    }

    /** @return the OIDC claim dialect URI used for federated claim mapping. */
    @Override
    public String getClaimDialectURI() {
        return "http://wso2.org/oidc/claim";
    }

    /**
     * Resolves the subject from the configured subject claim URI among the
     * federated claims, if possible.
     *
     * @param context current authentication context.
     * @return the subject value, or {@code null} if it cannot be resolved
     *         (any lookup failure is swallowed and debug-logged).
     */
    protected String getSubjectFromUserIDClaimURI(AuthenticationContext context) {
        String subject = null;
        try {
            subject = FrameworkUtils.getFederatedSubjectFromClaims(context, getClaimDialectURI());
        } catch (Exception e) {
            if(log.isDebugEnabled()) {
                log.debug("Couldn't find the subject claim from claim mappings ", e);
            }
        }
        return subject;
    }

    /**
     * Request user claims from user info endpoint.
     * <p>
     * NOTE(review): the reader is only closed on the success path — an IOException
     * mid-read leaks the connection's input stream; consider try-with-resources.
     *
     * @param url User info endpoint.
     * @param accessToken Access token.
     * @return Response string (empty when {@code url} is null).
     * @throws IOException on connection or read failure.
     */
    private String sendRequest(String url, String accessToken) throws IOException {
        if (log.isDebugEnabled()) {
            log.debug("Claim URL: " + url + " & Access-Token : " + accessToken);
        }
        if (url == null) {
            return StringUtils.EMPTY;
        }
        URL obj = new URL(url);
        HttpURLConnection urlConnection = (HttpURLConnection) obj.openConnection();
        urlConnection.setRequestMethod("GET");
        // Bearer authorization with the access token obtained during the code exchange.
        urlConnection.setRequestProperty("Authorization", "Bearer " + accessToken);
        BufferedReader reader = new BufferedReader(new InputStreamReader(urlConnection.getInputStream()));
        StringBuilder builder = new StringBuilder();
        String inputLine = reader.readLine();
        while (inputLine != null) {
            builder.append(inputLine).append("\n");
            inputLine = reader.readLine();
        }
        reader.close();
        if (log.isDebugEnabled()) {
            log.debug("response: " + builder.toString());
        }
        return builder.toString();
    }
}