code
stringlengths
3
1.01M
repo_name
stringlengths
5
116
path
stringlengths
3
311
language
stringclasses
30 values
license
stringclasses
15 values
size
int64
3
1.01M
<!DOCTYPE html> <html lang="{{ site.lang | default: "en-US" }}"> <head> <meta charset="UTF-8"> {% seo %} <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="theme-color" content="#157878"> <link href='https://fonts.googleapis.com/css?family=Open+Sans:400,700' rel='stylesheet' type='text/css'> <link rel="stylesheet" href="{{ '/assets/css/style.css?v=' | append: site.github.build_revision | relative_url }}"> <style> * { box-sizing: border-box; } body { margin: 0; } /* Create two equal columns that floats next to each other */ .column { float: left; width: 30%; padding: 10px; height: 300px; /* Should be removed. Only for demonstration */ } /* Clear floats after the columns */ .row:after { content: ""; display: table; clear: both; } /* Responsive layout - makes the two columns stack on top of each other instead of next to each other */ @media (max-width: 600px) { .column { width: 100%; } } </style> </head> <body> <section class="page-header"> <h1 class="project-name">{{ site.title | default: site.github.repository_name }}</h1> <h2 class="project-tagline">{{ site.description | default: site.github.project_tagline }}</h2> {% if site.github.is_project_page %} <a href="{{ site.github.repository_url }}" class="btn">View on GitHub</a> {% endif %} {% if site.show_downloads %} <a href="{{ site.github.zip_url }}" class="btn">Download .zip</a> <a href="{{ site.github.tar_url }}" class="btn">Download .tar.gz</a> {% endif %} </section> <div class="row"> <div class="column"> {% include_relative _sidebar.md %} </div> <section class="column main-content" style="width:70%"> {{ content }} <footer class="site-footer"> {% if site.github.is_project_page %} <span class="site-footer-owner"><a href="{{ site.github.repository_url }}">{{ site.github.repository_name }}</a> is maintained by <a href="{{ site.github.owner_url }}">{{ site.github.owner_name }}</a>.</span> {% endif %} <span class="site-footer-credits">This page was generated by <a 
href="https://pages.github.com">GitHub Pages</a>.</span> </footer> </section> </div> {% if site.google_analytics %} <script type="text/javascript"> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', '{{ site.google_analytics }}', 'auto'); ga('send', 'pageview'); </script> {% endif %} </body> </html>
worldiety/homunculus
docs/_layouts/default.html
HTML
apache-2.0
2,866
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h> /* close(), read(), write() were previously implicitly declared */

/**
 * Create (truncate) a batch of writable temp files named "tmp.000",
 * "tmp.001", ... and store their descriptors in fileHandlerArray.
 *
 * Slots are pre-set to -1; a slot stays/ends up -1 when open() fails,
 * so callers can detect per-file failures by inspecting the array.
 *
 * @param fileHandlerArray     output array of file descriptors
 * @param fileHandlerArraySize number of files to create
 * @return always 0 (failures are reported through the array slots)
 */
int
createWRFileInBatch (int fileHandlerArray[], int fileHandlerArraySize)
{
    int retValue = 0;

    if (fileHandlerArray != NULL && fileHandlerArraySize > 0)
    {
        for (int i = 0; i < fileHandlerArraySize; i++)
        {
            fileHandlerArray[i] = -1;
        }

        for (int i = 0; i < fileHandlerArraySize; i++)
        {
            char buffer[64];

            /* snprintf cannot overflow buffer; open() returns -1 on failure,
               which is exactly the sentinel the slot already holds. */
            snprintf (buffer, sizeof (buffer), "tmp.%03d", i);
            fileHandlerArray[i] = open (buffer, O_CREAT | O_WRONLY | O_TRUNC, S_IRWXU);
        }
    }

    return retValue;
}

/**
 * Open the batch of temp files "tmp.000", "tmp.001", ... read-only and
 * store their descriptors in fileHandlerArray.
 *
 * Slots are pre-set to -1; a slot stays -1 when open() fails.
 *
 * @param fileHandlerArray     output array of file descriptors
 * @param fileHandlerArraySize number of files to open
 * @return always 0 (failures are reported through the array slots)
 */
int
createRDFileInBatch (int fileHandlerArray[], int fileHandlerArraySize)
{
    int retValue = 0;

    if (fileHandlerArray != NULL && fileHandlerArraySize > 0)
    {
        for (int i = 0; i < fileHandlerArraySize; i++)
        {
            fileHandlerArray[i] = -1;
        }

        for (int i = 0; i < fileHandlerArraySize; i++)
        {
            char buffer[64];

            snprintf (buffer, sizeof (buffer), "tmp.%03d", i);
            fileHandlerArray[i] = open (buffer, O_RDONLY);
        }
    }

    return retValue;
}

/**
 * Close every valid (>= 0) descriptor in the batch and reset the slot
 * to -1 so a double call is harmless (no double-close).
 *
 * @param fileHandlerArray     array of file descriptors
 * @param fileHandlerArraySize number of slots in the array
 * @return always 0
 */
int
closeFileInBatch (int fileHandlerArray[], int fileHandlerArraySize)
{
    int retValue = 0;

    if (fileHandlerArray != NULL && fileHandlerArraySize > 0)
    {
        for (int i = 0; i < fileHandlerArraySize; i++)
        {
            int fd = fileHandlerArray[i];

            if (fd >= 0)
            {
                close (fd);
                fileHandlerArray[i] = -1;
            }
        }
    }

    return retValue;
}

/**
 * Create (or open for append) the target "parent" file that the child
 * chunks will be merged into.
 *
 * @param fileName path of the target file
 * @return file descriptor on success, -1 on failure
 */
int
createTargetFile (char *fileName)
{
    return open (fileName, O_WRONLY | O_CREAT | O_APPEND, S_IRWXU);
}

/**
 * Close the target file descriptor if it is valid.
 *
 * @param fd descriptor returned by createTargetFile()
 * @return always 0
 */
int
closeTargetfile (int fd)
{
    int retValue = 0;

    if (fd > -1)
    {
        close (fd);
    }

    return retValue;
}

/**
 * Copy the body of every child file into the parent file, in order.
 *
 * Each child is assumed to start with a header terminated by a blank
 * line ("\r\n\r\n" when useWindowsStyle is non-zero, "\n\n" otherwise);
 * everything after that delimiter is appended to the target.
 *
 * Fixes over the previous version:
 *  - the header scan used `j < result - 4` with size_t j, which
 *    underflowed (huge unsigned bound -> out-of-bounds read) whenever a
 *    chunk was shorter than 4 bytes, and was off-by-one for the 4-byte
 *    Windows delimiter; the bound is now `j + delimLen <= result`;
 *  - a read() error (-1) now terminates the loop instead of looping;
 *  - a target descriptor of 0 is accepted (any fd >= 0 is valid).
 *
 * NOTE(review): a delimiter that straddles two read() chunks is still
 * missed, as in the original — each chunk is scanned independently.
 * Confirm headers always fit in the first 8 KiB chunk.
 *
 * @param targetFileHandler    descriptor of the parent (output) file
 * @param fileHandlerArray     descriptors of the child (input) files
 * @param fileHandlerArraySize number of child files
 * @param useWindowsStyle      non-zero => CRLF CRLF delimiter, else LF LF
 * @return always 0
 */
int
mergeFile (int targetFileHandler, int fileHandlerArray[],
           int fileHandlerArraySize, int useWindowsStyle)
{
    int retValue = 0;

    printf ("\nmerging");

    if (fileHandlerArray != NULL && fileHandlerArraySize > 0
        && targetFileHandler >= 0)
    {
        /* Delimiter length: "\r\n\r\n" is 4 bytes, "\n\n" is 2. */
        const ssize_t delimLen = useWindowsStyle ? 4 : 2;

        for (int i = 0; i < fileHandlerArraySize; i++)
        {
            int childFd = fileHandlerArray[i];
            long writeCount = 0;

            if (childFd < 0)
            {
                continue;
            }

            char buf[8192];
            int foundNewLine = 0;

            while (1)
            {
                ssize_t result = read (childFd, buf, sizeof (buf));

                /* Stop on EOF (0) and on error (-1); the old `!result`
                   test spun forever on a persistent read error. */
                if (result <= 0)
                {
                    break;
                }

                if (foundNewLine)
                {
                    /* Past the header: pass the whole chunk through. */
                    ssize_t count = write (targetFileHandler, buf, (size_t) result);

                    if (count > 0)
                    {
                        writeCount += count;
                    }
                    printf (".");
                }
                else
                {
                    /* Still scanning this chunk for the header delimiter.
                       Bound guarantees buf[j .. j+delimLen-1] is in range. */
                    for (ssize_t j = 0; j + delimLen <= result; j++)
                    {
                        int matched;

                        if (useWindowsStyle)
                        {
                            matched = buf[j] == '\r' && buf[j + 1] == '\n'
                                      && buf[j + 2] == '\r' && buf[j + 3] == '\n';
                        }
                        else
                        {
                            matched = buf[j] == '\n' && buf[j + 1] == '\n';
                        }

                        if (matched)
                        {
                            ssize_t count = write (targetFileHandler,
                                                   &buf[j + delimLen],
                                                   (size_t) (result - j - delimLen));

                            foundNewLine = 1;
                            if (count > 0)
                            {
                                writeCount += count;
                            }
                            break;
                        }
                    }
                }
            }

            (void) writeCount; /* kept for diagnostics parity with original */
        }
    }

    return retValue;
}
zephyr-yang/flashget.2017
client_file.c
C
apache-2.0
4,565
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_32) on Tue Jun 18 11:08:43 PDT 2013 --> <TITLE> SetLoadBalancerPoliciesOfListenerRequest (AWS SDK for Java - 1.4.7) </TITLE> <META NAME="date" CONTENT="2013-06-18"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../JavaDoc.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="SetLoadBalancerPoliciesOfListenerRequest (AWS SDK for Java - 1.4.7)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> <span id="feedback_section"><h3>Did this page help you?</h3>&nbsp;&nbsp;&nbsp;<a id="feedback_yes" 
target="_blank">Yes</a>&nbsp;&nbsp;&nbsp;<a id="feedback_no" target="_blank">No</a>&nbsp;&nbsp;&nbsp;<a id="go_cti" target="_blank">Tell us about it...</a></span> <script type="text/javascript"> var javadoc_root_name = "/javadoc/"; var javadoc_path = location.href.substring(0, location.href.lastIndexOf(javadoc_root_name) + javadoc_root_name.length); var file_path = location.href.substring(location.href.lastIndexOf(javadoc_root_name) + javadoc_root_name.length); var feedback_yes_url = javadoc_path + "javadoc-resources/feedbackyes.html?topic_id="; var feedback_no_url = javadoc_path + "javadoc-resources/feedbackno.html?topic_id="; var feedback_tellmore_url = "https://aws-portal.amazon.com/gp/aws/html-forms-controller/documentation/aws_doc_feedback_04?service_name=Java-Ref&file_name="; if(file_path != "overview-frame.html") { var file_name = file_path.replace(/[/.]/g, '_'); document.getElementById("feedback_yes").setAttribute("href", feedback_yes_url + file_name); document.getElementById("feedback_no").setAttribute("href", feedback_no_url + file_name); document.getElementById("go_cti").setAttribute("href", feedback_tellmore_url + file_name); } else { // hide the header in overview-frame page document.getElementById("feedback_section").innerHTML = ""; } </script> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesForBackendServerResult.html" title="class in com.amazonaws.services.elasticloadbalancing.model"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerResult.html" title="class in com.amazonaws.services.elasticloadbalancing.model"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A 
HREF="../../../../../index.html?com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="SetLoadBalancerPoliciesOfListenerRequest.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_summary">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_detail">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <!-- ======== START OF CLASS DATA ======== --> <H2> <FONT SIZE="-1"> com.amazonaws.services.elasticloadbalancing.model</FONT> <BR> Class SetLoadBalancerPoliciesOfListenerRequest</H2> <PRE> <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</A> <IMG SRC="../../../../../resources/inherit.gif" ALT="extended by "><A HREF="../../../../../com/amazonaws/AmazonWebServiceRequest.html" title="class in com.amazonaws">com.amazonaws.AmazonWebServiceRequest</A> <IMG SRC="../../../../../resources/inherit.gif" ALT="extended by "><B>com.amazonaws.services.elasticloadbalancing.model.SetLoadBalancerPoliciesOfListenerRequest</B> </PRE> <DL> <DT><B>All Implemented Interfaces:</B> <DD><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</A></DD> </DL> <HR> <DL> 
<DT><PRE>public class <B>SetLoadBalancerPoliciesOfListenerRequest</B><DT>extends <A HREF="../../../../../com/amazonaws/AmazonWebServiceRequest.html" title="class in com.amazonaws">AmazonWebServiceRequest</A><DT>implements <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</A></DL> </PRE> <P> Container for the parameters to the <A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/AmazonElasticLoadBalancing.html#setLoadBalancerPoliciesOfListener(com.amazonaws.services.elasticloadbalancing.model.SetLoadBalancerPoliciesOfListenerRequest)"><CODE>SetLoadBalancerPoliciesOfListener operation</CODE></A>. <p> Associates, updates, or disables a policy with a listener on the LoadBalancer. You can associate multiple policies with a listener. </p> <P> <P> <DL> <DT><B>See Also:</B><DD><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/AmazonElasticLoadBalancing.html#setLoadBalancerPoliciesOfListener(com.amazonaws.services.elasticloadbalancing.model.SetLoadBalancerPoliciesOfListenerRequest)"><CODE>AmazonElasticLoadBalancing.setLoadBalancerPoliciesOfListener(SetLoadBalancerPoliciesOfListenerRequest)</CODE></A>, <A HREF="../../../../../serialized-form.html#com.amazonaws.services.elasticloadbalancing.model.SetLoadBalancerPoliciesOfListenerRequest">Serialized Form</A></DL> <HR> <P> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <A NAME="constructor_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Constructor Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#SetLoadBalancerPoliciesOfListenerRequest()">SetLoadBalancerPoliciesOfListenerRequest</A></B>()</CODE> <BR> 
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Default constructor for a new SetLoadBalancerPoliciesOfListenerRequest object.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#SetLoadBalancerPoliciesOfListenerRequest(java.lang.String, java.lang.Integer, java.util.List)">SetLoadBalancerPoliciesOfListenerRequest</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&nbsp;loadBalancerName, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</A>&nbsp;loadBalancerPort, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</A>&lt;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&gt;&nbsp;policyNames)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Constructs a new SetLoadBalancerPoliciesOfListenerRequest object.</TD> </TR> </TABLE> &nbsp; <!-- ========== METHOD SUMMARY =========== --> <A NAME="method_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Method Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;boolean</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#equals(java.lang.Object)">equals</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true" title="class or interface in 
java.lang">Object</A>&nbsp;obj)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#getLoadBalancerName()">getLoadBalancerName</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The name associated with the LoadBalancer.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#getLoadBalancerPort()">getLoadBalancerPort</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The external port of the LoadBalancer with which this policy applies to.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</A>&lt;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&gt;</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#getPolicyNames()">getPolicyNames</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;List of 
policies to be associated with the listener.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;int</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#hashCode()">hashCode</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#setLoadBalancerName(java.lang.String)">setLoadBalancerName</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&nbsp;loadBalancerName)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The name associated with the LoadBalancer.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#setLoadBalancerPort(java.lang.Integer)">setLoadBalancerPort</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</A>&nbsp;loadBalancerPort)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The external port of the LoadBalancer with which this policy applies to.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A 
HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#setPolicyNames(java.util.Collection)">setPolicyNames</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</A>&lt;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&gt;&nbsp;policyNames)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;List of policies to be associated with the listener.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#toString()">toString</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Returns a string representation of this object; useful for testing and debugging.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" title="class in com.amazonaws.services.elasticloadbalancing.model">SetLoadBalancerPoliciesOfListenerRequest</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#withLoadBalancerName(java.lang.String)">withLoadBalancerName</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&nbsp;loadBalancerName)</CODE> <BR> 
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The name associated with the LoadBalancer.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" title="class in com.amazonaws.services.elasticloadbalancing.model">SetLoadBalancerPoliciesOfListenerRequest</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#withLoadBalancerPort(java.lang.Integer)">withLoadBalancerPort</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</A>&nbsp;loadBalancerPort)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;The external port of the LoadBalancer with which this policy applies to.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" title="class in com.amazonaws.services.elasticloadbalancing.model">SetLoadBalancerPoliciesOfListenerRequest</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#withPolicyNames(java.util.Collection)">withPolicyNames</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</A>&lt;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&gt;&nbsp;policyNames)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;List of policies to be associated with the 
listener.</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" title="class in com.amazonaws.services.elasticloadbalancing.model">SetLoadBalancerPoliciesOfListenerRequest</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html#withPolicyNames(java.lang.String...)">withPolicyNames</A></B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>...&nbsp;policyNames)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;List of policies to be associated with the listener.</TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_com.amazonaws.AmazonWebServiceRequest"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class com.amazonaws.<A HREF="../../../../../com/amazonaws/AmazonWebServiceRequest.html" title="class in com.amazonaws">AmazonWebServiceRequest</A></B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE><A HREF="../../../../../com/amazonaws/AmazonWebServiceRequest.html#copyPrivateRequestParameters()">copyPrivateRequestParameters</A>, <A HREF="../../../../../com/amazonaws/AmazonWebServiceRequest.html#getRequestClientOptions()">getRequestClientOptions</A>, <A HREF="../../../../../com/amazonaws/AmazonWebServiceRequest.html#getRequestCredentials()">getRequestCredentials</A>, <A HREF="../../../../../com/amazonaws/AmazonWebServiceRequest.html#setRequestCredentials(com.amazonaws.auth.AWSCredentials)">setRequestCredentials</A></CODE></TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A> 
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class java.lang.<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</A></B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#getClass()" title="class or interface in java.lang">getClass</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#notify()" title="class or interface in java.lang">notify</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#notifyAll()" title="class or interface in java.lang">notifyAll</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#wait()" title="class or interface in java.lang">wait</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#wait(long)" title="class or interface in java.lang">wait</A>, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#wait(long, int)" title="class or interface in java.lang">wait</A></CODE></TD> </TR> </TABLE> &nbsp; <P> <!-- ========= CONSTRUCTOR DETAIL ======== --> <A NAME="constructor_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Constructor Detail</B></FONT></TH> </TR> </TABLE> <A NAME="SetLoadBalancerPoliciesOfListenerRequest()"><!-- --></A><H3> SetLoadBalancerPoliciesOfListenerRequest</H3> <PRE> public <B>SetLoadBalancerPoliciesOfListenerRequest</B>()</PRE> <DL> <DD>Default constructor for a new SetLoadBalancerPoliciesOfListenerRequest object. Callers should use the setter or fluent setter (with...) 
methods to initialize this object after creating it. <P> </DL> <HR> <A NAME="SetLoadBalancerPoliciesOfListenerRequest(java.lang.String, java.lang.Integer, java.util.List)"><!-- --></A><H3> SetLoadBalancerPoliciesOfListenerRequest</H3> <PRE> public <B>SetLoadBalancerPoliciesOfListenerRequest</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&nbsp;loadBalancerName, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</A>&nbsp;loadBalancerPort, <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</A>&lt;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&gt;&nbsp;policyNames)</PRE> <DL> <DD>Constructs a new SetLoadBalancerPoliciesOfListenerRequest object. Callers should use the setter or fluent setter (with...) methods to initialize any additional object members. <P> <DL> <DT><B>Parameters:</B><DD><CODE>loadBalancerName</CODE> - The name associated with the LoadBalancer. The name must be unique within the client AWS account.<DD><CODE>loadBalancerPort</CODE> - The external port of the LoadBalancer with which this policy applies to.<DD><CODE>policyNames</CODE> - List of policies to be associated with the listener. Currently this list can have at most one policy. 
If the list is empty, the current policy is removed from the listener.</DL> </DL> <!-- ============ METHOD DETAIL ========== --> <A NAME="method_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Method Detail</B></FONT></TH> </TR> </TABLE> <A NAME="getLoadBalancerName()"><!-- --></A><H3> getLoadBalancerName</H3> <PRE> public <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <B>getLoadBalancerName</B>()</PRE> <DL> <DD>The name associated with the LoadBalancer. The name must be unique within the client AWS account. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Returns:</B><DD>The name associated with the LoadBalancer. The name must be unique within the client AWS account.</DL> </DD> </DL> <HR> <A NAME="setLoadBalancerName(java.lang.String)"><!-- --></A><H3> setLoadBalancerName</H3> <PRE> public void <B>setLoadBalancerName</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&nbsp;loadBalancerName)</PRE> <DL> <DD>The name associated with the LoadBalancer. The name must be unique within the client AWS account. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Parameters:</B><DD><CODE>loadBalancerName</CODE> - The name associated with the LoadBalancer. 
The name must be unique within the client AWS account.</DL> </DD> </DL> <HR> <A NAME="withLoadBalancerName(java.lang.String)"><!-- --></A><H3> withLoadBalancerName</H3> <PRE> public <A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" title="class in com.amazonaws.services.elasticloadbalancing.model">SetLoadBalancerPoliciesOfListenerRequest</A> <B>withLoadBalancerName</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&nbsp;loadBalancerName)</PRE> <DL> <DD>The name associated with the LoadBalancer. The name must be unique within the client AWS account. <p> Returns a reference to this object so that method calls can be chained together. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Parameters:</B><DD><CODE>loadBalancerName</CODE> - The name associated with the LoadBalancer. The name must be unique within the client AWS account. <DT><B>Returns:</B><DD>A reference to this updated object so that method calls can be chained together.</DL> </DD> </DL> <HR> <A NAME="getLoadBalancerPort()"><!-- --></A><H3> getLoadBalancerPort</H3> <PRE> public <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</A> <B>getLoadBalancerPort</B>()</PRE> <DL> <DD>The external port of the LoadBalancer with which this policy applies to. 
<P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Returns:</B><DD>The external port of the LoadBalancer with which this policy applies to.</DL> </DD> </DL> <HR> <A NAME="setLoadBalancerPort(java.lang.Integer)"><!-- --></A><H3> setLoadBalancerPort</H3> <PRE> public void <B>setLoadBalancerPort</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</A>&nbsp;loadBalancerPort)</PRE> <DL> <DD>The external port of the LoadBalancer with which this policy applies to. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Parameters:</B><DD><CODE>loadBalancerPort</CODE> - The external port of the LoadBalancer with which this policy applies to.</DL> </DD> </DL> <HR> <A NAME="withLoadBalancerPort(java.lang.Integer)"><!-- --></A><H3> withLoadBalancerPort</H3> <PRE> public <A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" title="class in com.amazonaws.services.elasticloadbalancing.model">SetLoadBalancerPoliciesOfListenerRequest</A> <B>withLoadBalancerPort</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</A>&nbsp;loadBalancerPort)</PRE> <DL> <DD>The external port of the LoadBalancer with which this policy applies to. <p> Returns a reference to this object so that method calls can be chained together. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Parameters:</B><DD><CODE>loadBalancerPort</CODE> - The external port of the LoadBalancer with which this policy applies to. 
<DT><B>Returns:</B><DD>A reference to this updated object so that method calls can be chained together.</DL> </DD> </DL> <HR> <A NAME="getPolicyNames()"><!-- --></A><H3> getPolicyNames</H3> <PRE> public <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</A>&lt;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&gt; <B>getPolicyNames</B>()</PRE> <DL> <DD>List of policies to be associated with the listener. Currently this list can have at most one policy. If the list is empty, the current policy is removed from the listener. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Returns:</B><DD>List of policies to be associated with the listener. Currently this list can have at most one policy. If the list is empty, the current policy is removed from the listener.</DL> </DD> </DL> <HR> <A NAME="setPolicyNames(java.util.Collection)"><!-- --></A><H3> setPolicyNames</H3> <PRE> public void <B>setPolicyNames</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</A>&lt;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&gt;&nbsp;policyNames)</PRE> <DL> <DD>List of policies to be associated with the listener. Currently this list can have at most one policy. If the list is empty, the current policy is removed from the listener. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Parameters:</B><DD><CODE>policyNames</CODE> - List of policies to be associated with the listener. Currently this list can have at most one policy. 
If the list is empty, the current policy is removed from the listener.</DL> </DD> </DL> <HR> <A NAME="withPolicyNames(java.lang.String...)"><!-- --></A><H3> withPolicyNames</H3> <PRE> public <A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" title="class in com.amazonaws.services.elasticloadbalancing.model">SetLoadBalancerPoliciesOfListenerRequest</A> <B>withPolicyNames</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>...&nbsp;policyNames)</PRE> <DL> <DD>List of policies to be associated with the listener. Currently this list can have at most one policy. If the list is empty, the current policy is removed from the listener. <p> Returns a reference to this object so that method calls can be chained together. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Parameters:</B><DD><CODE>policyNames</CODE> - List of policies to be associated with the listener. Currently this list can have at most one policy. If the list is empty, the current policy is removed from the listener. 
<DT><B>Returns:</B><DD>A reference to this updated object so that method calls can be chained together.</DL> </DD> </DL> <HR> <A NAME="withPolicyNames(java.util.Collection)"><!-- --></A><H3> withPolicyNames</H3> <PRE> public <A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" title="class in com.amazonaws.services.elasticloadbalancing.model">SetLoadBalancerPoliciesOfListenerRequest</A> <B>withPolicyNames</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</A>&lt;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A>&gt;&nbsp;policyNames)</PRE> <DL> <DD>List of policies to be associated with the listener. Currently this list can have at most one policy. If the list is empty, the current policy is removed from the listener. <p> Returns a reference to this object so that method calls can be chained together. <P> <DD><DL> </DL> </DD> <DD><DL> <DT><B>Parameters:</B><DD><CODE>policyNames</CODE> - List of policies to be associated with the listener. Currently this list can have at most one policy. If the list is empty, the current policy is removed from the listener. <DT><B>Returns:</B><DD>A reference to this updated object so that method calls can be chained together.</DL> </DD> </DL> <HR> <A NAME="toString()"><!-- --></A><H3> toString</H3> <PRE> public <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <B>toString</B>()</PRE> <DL> <DD>Returns a string representation of this object; useful for testing and debugging. 
<P> <DD><DL> <DT><B>Overrides:</B><DD><CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang">toString</A></CODE> in class <CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</A></CODE></DL> </DD> <DD><DL> <DT><B>Returns:</B><DD>A string representation of this object.<DT><B>See Also:</B><DD><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#toString()" title="class or interface in java.lang"><CODE>Object.toString()</CODE></A></DL> </DD> </DL> <HR> <A NAME="hashCode()"><!-- --></A><H3> hashCode</H3> <PRE> public int <B>hashCode</B>()</PRE> <DL> <DD><DL> <DT><B>Overrides:</B><DD><CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#hashCode()" title="class or interface in java.lang">hashCode</A></CODE> in class <CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</A></CODE></DL> </DD> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="equals(java.lang.Object)"><!-- --></A><H3> equals</H3> <PRE> public boolean <B>equals</B>(<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</A>&nbsp;obj)</PRE> <DL> <DD><DL> <DT><B>Overrides:</B><DD><CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true#equals(java.lang.Object)" title="class or interface in java.lang">equals</A></CODE> in class <CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</A></CODE></DL> </DD> <DD><DL> </DL> </DD> </DL> <!-- ========= END OF CLASS DATA ========= --> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A 
HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> <script src="http://aws.amazon.com/js/urchin.js" type="text/javascript"></script> <script type="text/javascript">urchinTracker();</script> <!-- SiteCatalyst code version: H.25.2. Copyright 1996-2012 Adobe, Inc. All Rights Reserved. 
More info available at http://www.omniture.com --> <script language="JavaScript" type="text/javascript" src="https://d36cz9buwru1tt.cloudfront.net/js/sitecatalyst/s_code.min.js (view-source:https://d36cz9buwru1tt.cloudfront.net/js/sitecatalyst/s_code.min.js)" /> <script language="JavaScript" type="text/javascript"> <!-- // Documentation Service Name s.prop66='AWS SDK for Java'; s.eVar66='D=c66'; // Documentation Guide Name s.prop65='API Reference'; s.eVar65='D=c65'; var s_code=s.t();if(s_code)document.write(s_code) //--> </script> <script language="JavaScript" type="text/javascript"> <!--if(navigator.appVersion.indexOf('MSIE')>=0)document.write(unescape('%3C')+'\!-'+'-') //--> </script> <noscript> <img src="http://amazonwebservices.d2.sc.omtrdc.net/b/ss/awsamazondev/1/H.25.2--NS/0" height="1" width="1" border="0" alt="" /> </noscript> <!--/DO NOT REMOVE/--> <!-- End SiteCatalyst code version: H.25.2. --> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesForBackendServerResult.html" title="class in com.amazonaws.services.elasticloadbalancing.model"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../../../com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerResult.html" title="class in com.amazonaws.services.elasticloadbalancing.model"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../index.html?com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="SetLoadBalancerPoliciesOfListenerRequest.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A 
HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_summary">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_detail">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> Copyright &#169; 2010 Amazon Web Services, Inc. All Rights Reserved. </BODY> </HTML>
hobinyoon/aws-java-sdk-1.4.7
documentation/javadoc/com/amazonaws/services/elasticloadbalancing/model/SetLoadBalancerPoliciesOfListenerRequest.html
HTML
apache-2.0
39,810
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import absolute_import, division, print_function, unicode_literals import os from builtins import str from textwrap import dedent from pants.backend.jvm.targets.jvm_app import JvmApp from pants.backend.jvm.targets.jvm_binary import JvmBinary from pants.base.exceptions import TargetDefinitionException from pants.base.parse_context import ParseContext from pants.build_graph.address import Address from pants.build_graph.app_base import Bundle, DirectoryReMapper from pants.source.wrapped_globs import Globs from pants_test.test_base import TestBase def _bundle(rel_path): pc = ParseContext(rel_path=rel_path, type_aliases={}) return Bundle(pc) def _globs(rel_path): pc = ParseContext(rel_path=rel_path, type_aliases={}) return Globs(pc) class JvmAppTest(TestBase): def test_simple(self): binary_target = self.make_target(':foo-binary', JvmBinary, main='com.example.Foo') app_target = self.make_target(':foo', JvmApp, basename='foo-app', binary=':foo-binary') self.assertEqual('foo-app', app_target.payload.basename) self.assertEqual('foo-app', app_target.basename) self.assertEqual(binary_target, app_target.binary) self.assertEqual([':foo-binary'], list(app_target.compute_dependency_specs(payload=app_target.payload))) def test_jvmapp_bundle_payload_fields(self): app_target = self.make_target(':foo_payload', JvmApp, basename='foo-payload-app', archive='zip') self.assertEqual('foo-payload-app', app_target.payload.basename) self.assertIsNone(app_target.payload.deployjar) self.assertEqual('zip', app_target.payload.archive) def test_bad_basename(self): with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmApp.* basename must not equal name.'): self.make_target(':foo', JvmApp, basename='foo') def create_app(self, rel_path, name=None, **kwargs): self.create_file(os.path.join(rel_path, 
'config/densities.xml')) return self.make_target(Address(rel_path, name or 'app').spec, JvmApp, bundles=[_bundle(rel_path)(fileset='config/densities.xml')], **kwargs) def test_binary_via_binary(self): bin = self.make_target('src/java/org/archimedes/buoyancy:bin', JvmBinary) app = self.create_app('src/java/org/archimedes/buoyancy', binary=':bin') self.assertEqual(app.binary, bin) def test_binary_via_dependencies(self): bin = self.make_target('src/java/org/archimedes/buoyancy:bin', JvmBinary) app = self.create_app('src/java/org/archimedes/buoyancy', dependencies=[bin]) self.assertEqual(app.binary, bin) def test_degenerate_binaries(self): bin = self.make_target('src/java/org/archimedes/buoyancy:bin', JvmBinary) app = self.create_app('src/java/org/archimedes/buoyancy', binary=':bin', dependencies=[bin]) self.assertEqual(app.binary, bin) def test_no_binary(self): app = self.create_app('src/java/org/archimedes/buoyancy') with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmApp.*src/java/org/archimedes/buoyancy:app\).*' r' An app must define exactly one'): app.binary def test_too_many_binaries_mixed(self): self.make_target('src/java/org/archimedes/buoyancy:bin', JvmBinary) bin2 = self.make_target('src/java/org/archimedes/buoyancy:bin2', JvmBinary) app = self.create_app('src/java/org/archimedes/buoyancy', binary=':bin', dependencies=[bin2]) with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmApp.*src/java/org/archimedes/buoyancy:app\).*' r' An app must define exactly one'): app.binary def test_too_many_binaries_via_deps(self): bin = self.make_target('src/java/org/archimedes/buoyancy:bin', JvmBinary) bin2 = self.make_target('src/java/org/archimedes/buoyancy:bin2', JvmBinary) app = self.create_app('src/java/org/archimedes/buoyancy', dependencies=[bin, bin2]) with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmApp.*src/java/org/archimedes/buoyancy:app\).*' r' An app must define exactly one'): app.binary 
def test_not_a_binary(self): self.make_target('src/java/org/archimedes/buoyancy:bin', JvmBinary) self.create_app('src/java/org/archimedes/buoyancy', name='app', binary=':bin') app = self.create_app('src/java/org/archimedes/buoyancy', name='app2', binary=':app') with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmApp.*src/java/org/archimedes/buoyancy:app2\).*' r' Expected binary dependency'): app.binary class BundleTest(TestBase): def test_bundle_filemap_dest_bypath(self): spec_path = 'src/java/org/archimedes/buoyancy' densities = self.create_file(os.path.join(spec_path, 'config/densities.xml')) unused = self.make_target(Address(spec_path, 'unused').spec, JvmBinary) app = self.make_target(spec_path, JvmApp, dependencies=[unused], bundles=[_bundle(spec_path)(fileset='config/densities.xml')]) self.assertEqual(1, len(app.bundles)) # after one big refactor, ../../../../../ snuck into this path: self.assertEqual({densities: 'config/densities.xml'}, app.bundles[0].filemap) def test_bundle_filemap_dest_byglobs(self): spec_path = 'src/java/org/archimedes/tub' one = self.create_file(os.path.join(spec_path, 'config/one.xml')) two = self.create_file(os.path.join(spec_path, 'config/two.xml')) unused = self.make_target(Address(spec_path, 'unused').spec, JvmBinary) globs = _globs(spec_path) app = self.make_target(spec_path, JvmApp, dependencies=[unused], bundles=[_bundle(spec_path)(fileset=globs('config/*.xml'))]) self.assertEqual(1, len(app.bundles)) self.assertEqual({one: 'config/one.xml', two: 'config/two.xml'}, app.bundles[0].filemap) def test_bundle_filemap_dest_relative(self): spec_path = 'src/java/org/archimedes/crown' five = self.create_file(os.path.join(spec_path, 'gold/config/five.xml')) unused = self.make_target(Address(spec_path, 'unused').spec, JvmBinary) app = self.make_target(spec_path, JvmApp, dependencies=[unused], bundles=[_bundle(spec_path)(relative_to='gold', fileset='gold/config/five.xml')]) self.assertEqual(1, len(app.bundles)) 
self.assertEqual({five: 'config/five.xml'}, app.bundles[0].filemap) def test_bundle_filemap_dest_remap(self): spec_path = 'src/java/org/archimedes/crown' one = self.create_file(os.path.join(spec_path, 'config/one.xml')) unused = self.make_target(Address(spec_path, 'unused').spec, JvmBinary) mapper = DirectoryReMapper(os.path.join(spec_path, 'config'), 'gold/config') app = self.make_target(spec_path, JvmApp, dependencies=[unused], bundles=[_bundle(spec_path)(mapper=mapper, fileset='config/one.xml')]) self.assertEqual(1, len(app.bundles)) self.assertEqual({one: 'gold/config/one.xml'}, app.bundles[0].filemap) def test_bundle_filemap_remap_base_not_exists(self): # Create directly with self.assertRaises(DirectoryReMapper.NonexistentBaseError): DirectoryReMapper("dummy/src/java/org/archimedes/crown/missing", "dummy") def test_bundle_add(self): spec_path = 'src/java/org/archimedes/volume' stone_dense = self.create_file(os.path.join(spec_path, 'config/stone/dense.xml')) metal_dense = self.create_file(os.path.join(spec_path, 'config/metal/dense.xml')) unused = self.make_target(Address(spec_path, 'unused').spec, JvmBinary) bundle = _bundle(spec_path)(relative_to='config', fileset=['config/stone/dense.xml', 'config/metal/dense.xml']) app = self.make_target(spec_path, JvmApp, dependencies=[unused], bundles=[bundle]) self.assertEqual(1, len(app.bundles)) self.assertEqual({stone_dense: 'stone/dense.xml', metal_dense: 'metal/dense.xml'}, app.bundles[0].filemap) def test_multiple_bundles(self): spec_path = 'src/java/org/archimedes/volume' stone_dense = self.create_file(os.path.join(spec_path, 'config/stone/dense.xml')) metal_dense = self.create_file(os.path.join(spec_path, 'config/metal/dense.xml')) unused = self.make_target(Address(spec_path, 'unused').spec, JvmBinary) self.add_to_build_file('src/java/org/archimedes/volume/BUILD', dedent(""" jvm_app(name='volume', dependencies=[':unused'], bundles=[ bundle(relative_to='config', fileset='config/stone/dense.xml') ] ) 
jvm_app(name='bathtub', dependencies=[':unused'], bundles=[ bundle(fileset='config/metal/dense.xml') ] ) """)) app1 = self.make_target(Address(spec_path, 'app1').spec, JvmApp, dependencies=[unused], bundles=[_bundle(spec_path)(relative_to='config', fileset='config/stone/dense.xml')]) app2 = self.make_target(Address(spec_path, 'app2').spec, JvmApp, dependencies=[unused], bundles=[_bundle(spec_path)(fileset='config/metal/dense.xml')]) self.assertEqual(1, len(app1.bundles)) self.assertEqual({stone_dense: 'stone/dense.xml'}, app1.bundles[0].filemap) self.assertEqual(1, len(app2.bundles)) self.assertEqual({metal_dense: 'config/metal/dense.xml'}, app2.bundles[0].filemap) def test_globs_relative_to_build_root(self): spec_path = 'y' unused = self.make_target(spec_path, JvmBinary) globs = _globs(spec_path) app = self.make_target('y:app', JvmApp, dependencies=[unused], bundles=[ _bundle(spec_path)(fileset=globs("z/*")), _bundle(spec_path)(fileset=['a/b']) ]) self.assertEqual(['y/a/b', 'y/z/*'], sorted(app.globs_relative_to_buildroot()['globs'])) def test_list_of_globs_fails(self): # It's not allowed according to the docs, and will behave badly. spec_path = 'y' globs = _globs(spec_path) with self.assertRaises(ValueError): _bundle(spec_path)(fileset=[globs("z/*")]) def test_jvmapp_fingerprinting(self): spec_path = 'y' globs = _globs(spec_path) self.create_file(os.path.join(spec_path, 'one.xml')) self.create_file(os.path.join(spec_path, 'config/two.xml')) def calc_fingerprint(): # Globs are eagerly, therefore we need to recreate target to recalculate fingerprint. 
self.reset_build_graph() app = self.make_target('y:app', JvmApp, dependencies=[], bundles=[ _bundle(spec_path)(fileset=globs("*")) ]) return app.payload.fingerprint() fingerprint_before = calc_fingerprint() os.mkdir(os.path.join(self.build_root, spec_path, 'folder_one')) self.assertEqual(fingerprint_before, calc_fingerprint()) self.create_file(os.path.join(spec_path, 'three.xml')) self.assertNotEqual(fingerprint_before, calc_fingerprint()) def test_jvmapp_fingerprinting_with_non_existing_files(self): spec_path = 'y' def calc_fingerprint(): self.reset_build_graph() return self.make_target('y:app', JvmApp, dependencies=[], bundles=[ _bundle(spec_path)(fileset=['one.xml']) ]).payload.fingerprint() fingerprint_non_existing_file = calc_fingerprint() self.create_file(os.path.join(spec_path, 'one.xml')) fingerprint_empty_file = calc_fingerprint() self.create_file(os.path.join(spec_path, 'one.xml'), contents='some content') fingerprint_file_with_content = calc_fingerprint() self.assertNotEqual(fingerprint_empty_file, fingerprint_non_existing_file) self.assertNotEqual(fingerprint_empty_file, fingerprint_file_with_content) self.assertNotEqual(fingerprint_file_with_content, fingerprint_empty_file) def test_rel_path_with_glob_fails(self): # Globs are treated as eager, so rel_path doesn't affect their meaning. # The effect of this is likely to be confusing, so disallow it. spec_path = 'y' self.create_file(os.path.join(spec_path, 'z', 'somefile')) globs = _globs(spec_path) with self.assertRaises(ValueError) as cm: _bundle(spec_path)(rel_path="config", fileset=globs('z/*')) self.assertIn("Must not use a glob for 'fileset' with 'rel_path'.", str(cm.exception)) def test_allow_globs_when_rel_root_matches_rel_path(self): # If a glob has the same rel_root as the rel_path, then # it will correctly pick up the right files. # We don't allow BUILD files to have declarations with this state. # But filesets can be created this way via macros or pants internals. 
self.create_file(os.path.join('y', 'z', 'somefile')) bundle = _bundle('y')(rel_path="y/z", fileset=_globs('y/z')('*')) self.assertEqual({'globs': [u'y/z/*']}, bundle.fileset.filespec) def test_rel_path_overrides_context_rel_path_for_explicit_path(self): spec_path = 'y' unused = self.make_target(spec_path, JvmBinary) app = self.make_target('y:app', JvmApp, dependencies=[unused], bundles=[ _bundle(spec_path)(rel_path="config", fileset=['a/b']) ]) self.assertEqual({os.path.join(self.build_root, 'config/a/b'): 'a/b'}, app.bundles[0].filemap) self.assertEqual(['config/a/b'], sorted(app.globs_relative_to_buildroot()['globs']))
twitter/pants
tests/python/pants_test/backend/jvm/targets/test_jvm_app.py
Python
apache-2.0
14,693
<!DOCTYPE html> <html> <head> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" /> <meta http-equiv=Content-Type content="text/html;charset=utf-8"> <meta property="wb:webmaster" content="e635a420782119b7" /> <meta name="keywords" content="可穿戴,可穿戴设备,百度可穿戴设备,百度可穿戴,智能设备,智能可穿戴设备,超智能设备,百度智能设备,便携设备,便携智能设备,百度便携设备, 人体设备,智能人体设备,百度人体设备,便携人体设备,dulife,dulife平台,奇酷网,奇酷,360奇酷,小米酷玩,小米酷玩频道,百度硬件,智能硬件,硬件,智能移动设备,智能移动硬件 移动设备,移动硬件,可穿戴硬件,点名时间,母亲节"> <meta name="description" content="百度未来商店作为行业内首个基于生活需求和场景的智能硬件信息互动平台,秉承“科技改变生活”的理念,通过智能硬件提升人们在运动、睡眠、生活、出行、娱乐等方面的品质,让用户更有效的关爱自身健康和家庭生活,让科技真正融入到老百姓的生活中。平台将围绕可穿戴、智能家居、安全出行等几大领域,提供最前沿、最时尚、最实用、最专业的硬件产品介绍、服务定制、最新资讯、圈子交流等服务,打造行业最专业的智能硬件信息互动平台。"> <!--[if lte IE 7]> <div class="goodbye-modal hide"></div> <div class="goodbye-ie hide" id="goodbyeIE"> <p>您的浏览器太旧啦~为了获得更好的体验,强烈建议您使用以下浏览器:</p> <ul class="browers clearfix"> <li class="chrome"> <a target="_blank" href="https://www.google.com/intl/en/chrome/browser/"></a> <span>chrome</span> </li> <li class="firefox"> <a target="_blank" href="http://www.firefox.com.cn/download/"></a> <span>firefox</span> </li> <li class="ie9"> <a target="_blank" href="http://windows.microsoft.com/zh-cn/internet-explorer/download-ie"></a> <span>IE9+</span> </li> </ul> <p class="no-tip"><a id="iknow" href="javascript:void(0);">知道啦</a></p> </div> <![endif]--> <script> void function(g,f,j,c,h,d,b){g.alogObjectName=h,g[h]=g[h]||function(){(g[h].q=g[h].q||[]).push(arguments)},g[h].l=g[h].l||+new Date,d=f.createElement(j),d.async=!0,d.src=c,b=f.getElementsByTagName(j)[0],b.parentNode.insertBefore(d,b)}(window,document,"script","http://img.baidu.com/hunter/alog/alog.min.js","alog");void function(){function c(){return;}window.PDC={mark:function(a,b){alog("speed.set",a,b||+new Date);alog.fire&&alog.fire("mark")},init:function(a){alog("speed.set","options",a)},view_start:c,tti:c,page_ready:c}}();void function(n){var o=!1;n.onerror=function(n,e,t,c){var i=!0;return!e&&/^script 
error/i.test(n)&&(o?i=!1:o=!0),i&&alog("exception.send","exception",{msg:n,js:e,ln:t,col:c}),!1},alog("exception.on","catch",function(n){alog("exception.send","exception",{msg:n.msg,js:n.path,ln:n.ln,method:n.method,flag:"catch"})})}(window); </script> <link rel="icon" href="/static/common/favicon.ico" type="image/x-icon"> <link rel="shorticon icon" href="/static/common/favicon.ico" type="image/x-icon"> <meta property="wb:webmaster" content="18de27f07b76316f" /> <meta name="baidu-tc-cerfication" content="a7fc1bfc58a8b04c738569e2d3c418dc" /> <meta name="baidu-site-verification" content="OZIMDr2iVS" /> <script type="text/javascript"> window.duin = window.duin || {}; duin.userinfo = { islogin:'0', displayname: "", role: "", avatar: "" //头像 }; window.login_wrapper_url = 'http://passport.baidu.com/passApi/js/uni_login_wrapper.js?cdnversion=201411181717'; var _hmt = _hmt || []; _hmt.push(['_setAccount', '3d1ae725881ab60e9d4d62efe134498c']); </script> <title>百度未来商店-传华硕ZenWatch智能手表首批上市仅300块</title> <link rel="stylesheet" type="text/css" href="/static/common/pkg/common_aa2cbaf.css"/><link rel="stylesheet" type="text/css" href="/static/discovery/pkg/single-artical_09c7d24.css"/></head> <body> <script> alog('speed.set', 'ht', +new Date); </script> <script>with(document)0[(getElementsByTagName('head')[0]||body).appendChild(createElement('script')).src='http://img.baidu.com/hunter/kaifangyun.js?st='+~(new Date()/864e5)];</script> <script> window.hostType = "news"; window.hostId = "3369"; </script> <div class="nav" > <div class="nav-header clearfix"> <a href="/" class="fl-l nav-logo"></a> <div class="nav-right clearfix"> <div class="nav-info"> <span class="nav-not-login"><a href="#" class="nav-login">登录</a></span></div> <ul class="nav-list clearfix"> <li class="nav-home-item"><a href="/" class="">首页</a></li> <li class="nav-store-wrapper"><div class="nav-store">全部设备</div> <ul class="nav-store-list"> <li><a href="/sports">运动</a></li> <li><a href="/parlor">客厅</a></li> <li><a 
href="/sleep">睡眠</a></li> </ul> </li> <li class="nav-hot-item"><a class=" " href="/tryout/list">试用</a><span class="nav-hot"></span></li> <li><a class="" href="/discovery-news">发现</a></li> <li class="nav-last-li"><a class="" href="/product/ushome">合作</a></li> </ul> <div class="nav-search "> <form action="/search/index" id="searchForm" method="POST"> <input type="hidden" name="pn" value="1"> <input type="hidden" name="limit" value="12"> <input type="text" class="nav-search-input" autocomplete="off" name="scontent" value=""> <span class="nav-search-icon"></span> </form> </div> </div> </div> </div> <div class="d-main"> <div class="bread-path"> <a href="/">首页</a><span class="icon-path">&gt;</span><a href="/discovery-news">发现</a><span class="icon-path">&gt;</span><span>传华硕ZenWatch智能手表首批上市仅300块</span> </div> <div class="d-out-ad" id="friend"> <ul class="d-out-ad-wrapper"> <li><a href="http://store.baidu.com/trysome/view/2.html" target="_blank"><img src="http://bs.baidu.com/dulife/54698d532f196.jpg" height="100" width="960" title="未来试用第二期"></a></li> </ul> </div> <div class="d-content clearfix"> <div class="d-content-sub"> <div class="d-hot-recommend"> <div class="d-h-r-head"> <a href="javascript:void(0);" class="d-h-normal d-h-active">热门资讯</a> <span class="d-h-split"></span> <a href="javascript:void(0);" class="d-h-normal">小编推荐</a></div> <div class="d-h-r-content"> <div class="d-h-r-c-wrapper" id="hotEvaluation"> </div> <div class="d-h-r-c-wrapper" id="editorRecommend"> </div> </div> </div> <div class="d-best-recommend" id='bestRecommendWrapper'> </div> <div class="d-guess-like"> <div class="d-g-l-header clearfix"> <a href="javascript:;" class="d-g-l-change">换一批</a> <div class="d-g-l-title">猜你喜欢</div> </div> <div class="d-g-l-content" id="uLike"> </div> </div> </div> <div class="d-content-main"> <div class="d-artical-title">传华硕ZenWatch智能手表首批上市仅300块</div> <div class="d-a-info clearfix"> <div class="d-a-operation"><a href="#comment" class="d-a-o-comment">评论(<span 
id="commentCount">0</span>)</a><a href="#dLikeHash" class="d-a-o-like">喜欢(<span id="likeCount">0</span>)</a><span class="d-share clearfix"><span class="bdsharebuttonbox"><a href="#" class="bds_more" data-cmd="more">分享<span class="d-down-icon"></span></a></span></span></div> <div class="d-a-from"> <span class="d-a-f-name">来源:安珀</span> <span class="d-public-time" id="modifyTime"></span> </div> </div> <div class="d-summary">首批上市的产品数量极为有限,仅有不超过300块,并且将仅在其本土中国台湾地区发售。</div> <div class="d-artical-content" id="sourceContent"> <p>这款华硕智能手表ZenWatch配有1.63英寸、分辨率为320&times;320的屏幕,并且覆盖有一层康宁第三代Gorilla玻璃进行保护。运行内存为512MB,而机身存储内存则为4GB,搭载高通骁龙400处理器,将内置Android Wear OS定制而来的ZenUI。 此外,这款智能手表还支持IP55级别防尘防水功能,机身颜色为银色和玫瑰金两种。</p> <p><img src="http://bs.baidu.com/dulife/54364806aac77.jpg" alt="" /></p> <p>首批上市的产品数量极为有限,仅有不超过300块,并且将仅在其本土中国台湾地区发售。</p> </div> <div class="d-fav"> <a href="javascript:;" class="d-fav-like"><span class="d-fav-icon" id="dLikeHash"></span><span class="d-fav-count">0</span></a> <span class="d-share clearfix"> <span class="bdsharebuttonbox"><a href="#" class="bds_more d-s-icon" data-cmd="more"><i></i>分享</a></span> </span> </div> <div class="d-link clearfix"> <div class="d-prev"> <span>上一篇:</span><a href="/news/3368.html" target="_blank">带触摸屏的多合一电子卡推出 1张=20张银行卡</a> </div> <div class="d-next"> <span>下一篇:</span><a href="/news/3370.html" target="_blank">联想智能手环Smartband已在FCC和Bluetooth SIG手上</a> </div> </div> <div class="d-sort-comment" id="comment">短评<span>(</span><span id="ctCount">0</span><span>)</span></div> <form action="/news/api/addComment" method="POST " class="ct-form" name="ct-form" data-hostid="3369" data-committype="" data-hosttype="news"> <div class="clearfix"><textarea name="comment" aria-required="true" placeholder="同学,你怎么看?"></textarea></div> <div class="ct-submit"> <span class="ct-count"><span class="prefix">还能输入</span>&nbsp;<strong class="ct-limit">150</strong>&nbsp;个字</span><button type="submit" class="btn btn-primary">提交</button> </div> </form> <ul 
class="ct-list" id="ct-list-full"> <li class="empty hide">还木有评论哦,快来抢沙发吧~~</li> </ul> <div id="ct-page"></div> <script type="text/template" id="ct-tpl"> <% $.each(list, function(idx, item) { %> <li class="clearfix"> <div class="avatar"><img src="<%- item.user_icon %>" width="60" height="60" title="<%- item.user_name %>"></div> <div class="cont"> <div class="ut"> <span class="uname text-overflow"><%- item.user_name %></span><span class="date"><%- item.create_time %></span> </div> <%if(item.parent) { %> <div class="quote"> <div class="uname">@&nbsp;<span><%- item.parent.user_name %></span></div> <div class="qct"><%- item.parent.content %></div> </div> <% } %> <div class="ct"><%- item.content %></div> <%if(!item.parent) { %> <div class="tb"><a href="#" data-pid="<%- item.reply_id %>"><i></i>回复</a></div> <% } %> </div> </li> <% }); %> </script> </div> </div> </div> <script> window._bd_share_config={ "common":{"bdSnsKey":{}, "bdText":"", "bdMini":"1", "bdMiniList":["tsina","qzone","weixin","renren","tqq","douban","sqq"], "bdPic":"", "bdStyle":"0", "bdSize":"32", },"share":{} }; with(document)0[(getElementsByTagName('head')[0]||body).appendChild(createElement('script')).src='http://bdimg.share.baidu.com/static/api/js/share.js?v=86835285.js?cdnversion='+~(-new Date()/36e5)]; </script> <script type="text/template" id="hotRecommend"> <%if(!_.isEmpty(list)){ %> <ul class="d-h-r-c clearfix"> <% $.each(list, function(idx, item) { %> <li <%if(idx<3){ %> class="d-h-top3"<% } %>> <span class="d-h-r-c-no"><%-idx+1%></span> <a href="/news/<%-item.id%>.html" target="_blank"><%-item.title%></a> </li> <% }); %> </ul> <% } %> </script> <script type="text/template" id="guessLike"> <%if(!_.isEmpty(list)){ %> <ul> <% $.each(list, function(idx, item) { %> <li class="clearfix"> <a href="/product/view/<%-item.product_id%>.html" class="d-thumbnail" style="background-image:url(<%-item.product_cover_img%>) " target="_blank"> </a> <div class="d-g-l-c"> <a 
href="/product/view/<%-item.product_id%>.html" class="d-g-l-c-title" target="_blank"><%-item.product_name%></a> <a href="/product/view/<%-item.product_id%>.html#evaluation" class="d-g-l-c-write" target="_blank">写评测</a> </div> </li> <% }); %> </ul> <% } %> </script> <script type="text/template" id="guessLikeNews"> <%if(!_.isEmpty(list)){ %> <ul> <% $.each(list, function(idx, item) { %> <li class="clearfix"> <a href="/news/<%-item.id%>.html" target="_blank" class="d-thumbnail" style="background-image:url(<%-item.thumbnails%>);" > </a> <div class="d-g-l-c"> <a href="/news/<%-item.id%>.html" target="_blank" class="d-g-l-c-title"><%-item.title%></a> <a href="/news/<%-item.id%>.html#comment" target="_blank" class="d-g-l-c-count"><%-item.comment_count%></a> </div> </li> <% }); %> </ul> <% } %> </script> <script type="text/template" id="bestRecommend"> <%if(!_.isEmpty(list)){ %> <div class="d-b-r-head clearfix"> <div class="d-b-r-operator"> <% $.each(list, function(idx, item) { %> <a href="javascript:;" class="d-b-r-o-icon <%if(idx==0){ %> active <% } %>"></a> <% }); %> </div> <div class="d-b-r-title">精品推荐</div></div> <div class="d-b-r-content clearfix"> <% $.each(list, function(idx, item) { %> <div class="d-b-r-c"> <a href="/product/view/<%-item.product_id%>.html" target="_blank"><img src="<%-item.product_cover_img%>"></a> <a href="/product/view/<%-item.product_id%>.html" target="_blank" class="d-b-r-c-desc"> <%-item.product_name%> </a> </div> <% }); %> </div> <% } %> </script> <div class="footer clearfix"> <p class="fl-l">©2014 baidu<a href="http://home.baidu.com/" target="_blank">关于百度</a> <a href="/about" target="_blank">关于我们</a> <a href="http://www.baidu.com/duty/" target="_blank">使用前必读</a> <a href="/admin/index" target="_blank" class="hide" id="admin-entry">管理</a> <a href="/product/uscreateProduct?product_id=" target="_blank" class="hide" id="admin-product-edit">编辑</a> <a href="/product/usprovision" class="hide" id="admin-provision">服务条款</a> </p> <div class="fl-r 
link"> <a href="http://weibo.com/dulife" target="_blank" class="sina icon">新浪微博</a><a href="https://wx.qq.com/" target="_blank" class="weixin icon" style="visibility:hidden;">微信</a> </div> </div> <div class="backtop"></div> </body><script type="text/javascript" src="/static/common/pkg/common_56f87e5.js"></script> <script type="text/javascript" src="/static/discovery/pkg/discovery_baf8100.js"></script> <script type="text/javascript">!function(){var cookieName = 'duin_ie_tip'; if (document.cookie.indexOf(cookieName) === -1) { $('.goodbye-modal').show(); $('.goodbye-ie').show(); } $('#iknow').click(function() { $('.goodbye-modal').hide(); $('.goodbye-ie').hide(); document.cookie = cookieName + (+new Date); }); }(); !function(){ var href = location.href; $('.nav-login').click(function(e) { e.preventDefault(); duin.login(); }); $('.nav-logout').attr('href', 'http://passport.baidu.com?logout&tpl=mco_web&u=' + href); }(); !function(){require('common:widget/nav/nav.js'); duin.nav.searchFuc(); duin.nav.adjustWidth(); }(); !function(){require('common:widget/footer/footer.js'); }(); !function(){try { if (console && console.log) { console.log('%c', 'padding:12px 59px;line-height:60px;'); console.log('\n想挑战规模够大、协作够复杂的前端团队协作?\n' + '想寻求理念够新、自由度够高的前端成长空间?\n' + '想把前端做成极富创造力、极富成就感的一份工作?\n' + 'or 前端不仅仅是一份工作,而是你的理想和全部?\n\n' + '快到碗里来!\n' + '有些事,我们不做,就没人做了!\n' + '你不来,我们就没办法一起快乐玩耍了!\n' + '学历、年龄、性别、取向、节操通通不限,产品/代码洁癖患者优先考虑。\n'); console.info && console.info('请将简历发送至 %c spacehr@baidu.com( 邮件标题请以“姓名-前端-来自百度未来商店”命名)', 'color:red'); console.info && console.info('职位介绍:http://dwz.cn/mbufe'); } } catch (e) {} }(); !function(){ window.modifyTime = duin.dateUtil.format('yyyy-MM-dd HH:mm:ss', new Date(parseInt('1412843570')*1000)).substring(0, 10); }(); !function(){ jQuery(function() { alog('speed.set', 'drt', +new Date); }); }();</script><div class="hide"> <script type="text/javascript"> var _bdhmProtocol = (("https:" == document.location.protocol) ? 
" https://" : " http://"); document.write(unescape("%3Cscript src='" + _bdhmProtocol + "hm.baidu.com/h.js%3F0696c093b1f3ccb4153ab2e25fa7f411' type='text/javascript'%3E%3C/script%3E")); </script> </div> <div class="hide"> <script type="text/javascript"> var _bdhmProtocol = (("https:" == document.location.protocol) ? " https://" : " http://"); document.write(unescape("%3Cscript src='" + _bdhmProtocol + "hm.baidu.com/h.js%3F3d1ae725881ab60e9d4d62efe134498c' type='text/javascript'%3E%3C/script%3E")); </script> </div> <div class="hide"> <script type="text/javascript"> var _bdhmProtocol = (("https:" == document.location.protocol) ? " https://" : " http://"); document.write(unescape("%3Cscript src='" + _bdhmProtocol + "hm.baidu.com/h.js%3F91e35cba3b09a5afffa12b0db4325bee' type='text/javascript'%3E%3C/script%3E")); </script> </div> </html><!--10563710400507075594111817--> <script> var _trace_page_logid = 1056371040; </script><!--10563663250472472586111817--> <script> var _trace_page_logid = 1056366325; </script>
jiangerji/my-utils
store.baidu.com/cache/html/3369.html
HTML
apache-2.0
17,342
package org.opencb.biodata.tools.feature; import org.broad.igv.bbfile.*; import org.opencb.biodata.models.core.Region; import org.opencb.commons.utils.FileUtils; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; /** * Created by imedina on 25/11/16. */ public class BigWigManager { private Path bigWigFilePath; private BBFileReader bbFileReader; private List<Integer> zoomWindowSizes; @Deprecated private Path indexPath; @Deprecated public static final String BIGWIG_DB = "bigwig.db"; /** * Constructor. * * @param bigwigPath Path to the Big Wig file * @throws IOException */ public BigWigManager(Path bigwigPath) throws IOException { this.bigWigFilePath = bigwigPath; init(); } @Deprecated public BigWigManager(Path bigwigPath, Path indexPath) throws IOException { this.bigWigFilePath = bigwigPath; this.indexPath = indexPath; init(); } private void init() throws IOException { FileUtils.checkPath(this.bigWigFilePath); bbFileReader = new BBFileReader(this.bigWigFilePath.toString()); zoomWindowSizes = new ArrayList<>(); for (int zoomLevel = 1; zoomLevel <= bbFileReader.getZoomLevelCount(); zoomLevel++) { zoomWindowSizes.add(bbFileReader.getZoomLevels().getZoomLevelHeader(zoomLevel).getReductionLevel()); } } /** * Query by a given region. * * @param region Region target * @return Array of double values for that region * @throws IOException */ public double[] query(Region region) throws IOException { BigWigIterator bigWigIterator = iterator(region); double[] values = new double[region.getEnd() - region.getStart() + 1]; while (bigWigIterator.hasNext()) { WigItem wigItem = bigWigIterator.next(); for (int i = wigItem.getStartBase(), j = wigItem.getStartBase() - region.getStart(); i <= region.getEnd(); i++, j++) { values[j] = wigItem.getWigValue(); } } return values; } /** * Get the iterator for the given region. 
* * @param region Region target * @return Big Wig file iterator */ public BigWigIterator iterator(Region region) { // Sanity check WigUtils.validateRegion(region, bbFileReader); return bbFileReader.getBigWigIterator(region.getChromosome(), region.getStart(), region.getChromosome(), region.getEnd() + 1, false); } public ZoomLevelIterator iterator(Region region, int zoomLevel) { // Sanity check WigUtils.validateRegion(region, bbFileReader); return bbFileReader.getZoomLevelIterator(zoomLevel, region.getChromosome(), region.getStart(), region.getChromosome(), region.getEnd(), false); } public double[] groupBy(Region region, int windowSize) { int zoomLevel = -1; for (int level = 0; level < zoomWindowSizes.size(); level++) { if (windowSize < zoomWindowSizes.get(level)) { break; } zoomLevel++; } // Calculate the number of needed windows, ensure windowSize => 1 windowSize = Math.max(1, windowSize); int start = region.getStart(); int end = region.getEnd(); int numWindows = (end - start + 1) / windowSize; if ((end - start + 1) % windowSize != 0) { numWindows++; end = start + (numWindows * windowSize - 1); } double[] chunks = new double[numWindows]; if (zoomLevel == -1) { // No zoom level available. This can happen because there are not zoom levels or the window size is too small BigWigIterator bigWigIterator = iterator(new Region(region.getChromosome(), start, end)); WigItem wItem; int length, chunkStart, chunkEnd; while (bigWigIterator.hasNext()) { wItem = bigWigIterator.next(); chunkStart = (Math.max(start, wItem.getStartBase()) - start) / windowSize; chunkEnd = (Math.min(end, wItem.getEndBase()) - start) / windowSize; for (int chunk = chunkStart; chunk <= chunkEnd; chunk++) { length = Math.min(wItem.getEndBase() - start, chunk * windowSize + windowSize) - Math.max(wItem.getStartBase() - start, chunk * windowSize); if (chunk < chunks.length) { chunks[chunk] += (wItem.getWigValue() * length); } } } } else { // We get the zoom iterator, we need to increment by 1. 
ZoomLevelIterator zoomIterator = iterator(new Region(region.getChromosome(), start, end), zoomLevel + 1); ZoomDataRecord wItem; int length, chunkStart, chunkEnd; while (zoomIterator.hasNext()) { wItem = zoomIterator.next(); chunkStart = (Math.max(start, wItem.getChromStart()) - start) / windowSize; chunkEnd = (Math.min(end, wItem.getChromEnd()) - start) / windowSize; for (int chunk = chunkStart; chunk <= chunkEnd; chunk++) { length = Math.min(wItem.getChromEnd() - start, chunk * windowSize + windowSize) - Math.max(wItem.getChromStart() - start, chunk * windowSize); if (chunk < chunks.length) { chunks[chunk] += (wItem.getMeanVal() * length); } } } } for (int i = 0; i < chunks.length; i++) { chunks[i] /= windowSize; } return chunks; } public List<Integer> getZoomWindowSizes() { return zoomWindowSizes; } public BBFileReader getBbFileReader() { return bbFileReader; } }
opencb/biodata
biodata-tools/src/main/java/org/opencb/biodata/tools/feature/BigWigManager.java
Java
apache-2.0
5,997
import sys

import ray
import pytest
from ray.test_utils import (
    generate_system_config_map,
    wait_for_condition,
    wait_for_pid_to_exit,
)


@ray.remote
class Increase:
    """Simple actor used to verify actor calls keep working across a GCS restart."""

    def method(self, x):
        return x + 2


@ray.remote
def increase(x):
    # Plain-task counterpart of Increase.method; verifies that normal
    # (non-actor) tasks also work after the GCS server comes back.
    return x + 1


@pytest.mark.parametrize(
    "ray_start_regular", [
        generate_system_config_map(
            num_heartbeats_timeout=20, ping_gcs_rpc_server_max_retries=60)
    ],
    indirect=True)
def test_gcs_server_restart(ray_start_regular):
    """Checks that actors and tasks keep working after the GCS server
    is killed and restarted in place."""
    # Actor created *before* the restart must remain usable afterwards.
    actor1 = Increase.remote()
    result = ray.get(actor1.method.remote(1))
    assert result == 3

    # Kill and immediately restart the GCS server on the local node.
    ray.worker._global_node.kill_gcs_server()
    ray.worker._global_node.start_gcs_server()

    # The pre-existing actor still answers.
    result = ray.get(actor1.method.remote(7))
    assert result == 9

    # New actors can be created after the restart.
    actor2 = Increase.remote()
    result = ray.get(actor2.method.remote(2))
    assert result == 4

    # Plain tasks work as well.
    result = ray.get(increase.remote(1))
    assert result == 2


@pytest.mark.parametrize(
    "ray_start_regular", [
        generate_system_config_map(
            num_heartbeats_timeout=20, ping_gcs_rpc_server_max_retries=60)
    ],
    indirect=True)
def test_gcs_server_restart_during_actor_creation(ray_start_regular):
    """Checks that actor creations submitted just before a GCS restart
    all eventually complete once the server is back."""
    ids = []
    for i in range(0, 100):
        actor = Increase.remote()
        ids.append(actor.method.remote(1))

    # Restart the GCS server while the 100 actor creations are in flight.
    ray.worker._global_node.kill_gcs_server()
    ray.worker._global_node.start_gcs_server()

    # Every method call must resolve despite the restart (generous timeout).
    ready, unready = ray.wait(ids, num_returns=100, timeout=240)
    print("Ready objects is {}.".format(ready))
    print("Unready objects is {}.".format(unready))
    assert len(unready) == 0


@pytest.mark.parametrize(
    "ray_start_cluster_head", [
        generate_system_config_map(
            num_heartbeats_timeout=20, ping_gcs_rpc_server_max_retries=60)
    ],
    indirect=True)
def test_node_failure_detector_when_gcs_server_restart(ray_start_cluster_head):
    """Checks that the node failure detector is correct when gcs server restart.

    We set the cluster to timeout nodes after 2 seconds of heartbeats. We then
    kill gcs server and remove the worker node and restart gcs server again to
    check that the removed node will die finally.
    """
    cluster = ray_start_cluster_head
    worker = cluster.add_node()
    cluster.wait_for_nodes()

    # Make sure both head and worker node are alive.
    nodes = ray.nodes()
    assert len(nodes) == 2
    assert nodes[0]["alive"] and nodes[1]["alive"]

    # Identify the worker node's entry so we can track its liveness later.
    to_be_removed_node = None
    for node in nodes:
        if node["RayletSocketName"] == worker.raylet_socket_name:
            to_be_removed_node = node
    assert to_be_removed_node is not None

    head_node = cluster.head_node
    gcs_server_process = head_node.all_processes["gcs_server"][0].process
    gcs_server_pid = gcs_server_process.pid
    # Kill gcs server.
    cluster.head_node.kill_gcs_server()
    # Wait to prevent the gcs server process becoming zombie.
    gcs_server_process.wait()
    wait_for_pid_to_exit(gcs_server_pid, 1000)

    raylet_process = worker.all_processes["raylet"][0].process
    raylet_pid = raylet_process.pid
    # Remove worker node.
    cluster.remove_node(worker, allow_graceful=False)
    # Wait to prevent the raylet process becoming zombie.
    raylet_process.wait()
    wait_for_pid_to_exit(raylet_pid)

    # Restart gcs server process.
    cluster.head_node.start_gcs_server()

    def condition():
        # The removed node must still be listed, but flagged as not alive.
        nodes = ray.nodes()
        assert len(nodes) == 2
        for node in nodes:
            if node["NodeID"] == to_be_removed_node["NodeID"]:
                return not node["alive"]
        return False

    # Wait for the removed node dead.
    wait_for_condition(condition, timeout=10)


if __name__ == "__main__":
    import pytest
    sys.exit(pytest.main(["-v", __file__]))
robertnishihara/ray
python/ray/tests/test_gcs_fault_tolerance.py
Python
apache-2.0
3,783
::
:: Copyright 2014-2019 Real Logic Ltd.
::
:: Licensed under the Apache License, Version 2.0 (the "License");
:: you may not use this file except in compliance with the License.
:: You may obtain a copy of the License at
::
:: http://www.apache.org/licenses/LICENSE-2.0
::
:: Unless required by applicable law or agreed to in writing, software
:: distributed under the License is distributed on an "AS IS" BASIS,
:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
:: See the License for the specific language governing permissions and
:: limitations under the License.
::
@echo off

:: Launch the SegmentInspector sample from the built samples jar, forwarding
:: all script arguments. Requires JAVA_HOME to be set; extra JVM options can
:: be supplied via the JVM_OPTS environment variable.
"%JAVA_HOME%\bin\java" ^
    -cp ..\..\build\libs\samples.jar ^
    %JVM_OPTS% io.aeron.samples.archive.SegmentInspector %*
galderz/Aeron
aeron-samples/scripts/archive/segment-inspector.bat
Batchfile
apache-2.0
733
// Copyright 2017 DAIMTO ([Linda Lawton](https://twitter.com/LindaLawtonDK)) : [www.daimto.com](http://www.daimto.com/) // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on // an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by DAIMTO-Google-apis-Sample-generator 1.0.0 // Template File Name: APIKey.tt // Build date: 2017-10-08 // C# generater version: 1.0.0 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ // About // // Unoffical sample for the Appengine v1beta API for C#. // This sample is designed to be used with the Google .Net client library. (https://github.com/google/google-api-dotnet-client) // // API Description: The App Engine Admin API enables developers to provision and manage their App Engine applications. 
// API Documentation Link https://cloud.google.com/appengine/docs/admin-api/
//
// Discovery Doc https://www.googleapis.com/discovery/v1/apis/Appengine/v1beta/rest
//
//------------------------------------------------------------------------------
// Installation
//
// This sample code uses the Google .Net client library (https://github.com/google/google-api-dotnet-client)
//
// NuGet package:
//
// Location: https://www.nuget.org/packages/Google.Apis.Appengine.v1beta/
// Install Command: PM> Install-Package Google.Apis.Appengine.v1beta
//
//------------------------------------------------------------------------------

using Google.Apis.Appengine.v1beta;
using Google.Apis.Services;
using System;

namespace GoogleSamplecSharpSample.Appenginev1beta.Auth
{
    /// <summary>
    /// When calling APIs that do not access private user data, you can use simple API keys. These keys are used to authenticate your
    /// application for accounting purposes. The Google API Console documentation also describes API keys.
    /// https://support.google.com/cloud/answer/6158857
    /// </summary>
    public static class ApiKeyExample
    {
        /// <summary>
        /// Get a valid AppengineService for a public API Key.
        /// </summary>
        /// <param name="apiKey">API key from Google Developer console</param>
        /// <returns>AppengineService</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="apiKey"/> is null or empty.</exception>
        /// <exception cref="Exception">Thrown when the service could not be created.</exception>
        public static AppengineService GetService(string apiKey)
        {
            // Validate outside the try block so callers get a real
            // ArgumentNullException with the correct parameter name.
            // Previously this threw with the invalid name "api Key" (note the
            // space) and the catch block below swallowed and re-wrapped it.
            if (string.IsNullOrEmpty(apiKey))
                throw new ArgumentNullException(nameof(apiKey));

            try
            {
                // API-key-only clients can call public (non-user-data) endpoints.
                return new AppengineService(new BaseClientService.Initializer()
                {
                    ApiKey = apiKey,
                    ApplicationName = string.Format("{0} API key example",
                        System.Diagnostics.Process.GetCurrentProcess().ProcessName),
                });
            }
            catch (Exception ex)
            {
                throw new Exception("Failed to create new Appengine Service", ex);
            }
        }
    }
}
LindaLawton/Google-Dotnet-Samples
Samples/Google App Engine Admin API/v1beta/APIKey.cs
C#
apache-2.0
3,554
# Trimezia martinicensis (Jacq.) Herb. SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in Edwards's Bot. Reg. 30(Misc. ):88. 1844 #### Original name Iris martinicensis Jacq. ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Iridaceae/Trimezia/Trimezia martinicensis/README.md
Markdown
apache-2.0
249
import React from 'react'; import PropTypes from 'prop-types'; import classNames from 'classnames'; import './styles/ColumnView.scss'; import prefixClass from './utils/prefixClass'; import icBEM from './utils/icBEM'; export const COMPONENT_NAME = prefixClass('column-view'); const ROOT_BEM = icBEM(COMPONENT_NAME); export const BEM = { root: ROOT_BEM, header: ROOT_BEM.element('header'), body: ROOT_BEM.element('body'), footer: ROOT_BEM.element('footer'), }; export default function ColumnView({ header, footer, flexBody, bodyPadding, // React props className, headerRef, bodyRef, footerRef, children, ...wrapperProps }) { const rootClassName = classNames(`${BEM.root}`, className); const bodyClassName = BEM.body.modifier('flex', flexBody); const bodyStyle = { paddingTop: bodyPadding.top, paddingBottom: bodyPadding.bottom, paddingLeft: bodyPadding.left, paddingRight: bodyPadding.right, }; return ( <div className={rootClassName} {...wrapperProps} > {header && ( <div className={`${BEM.header}`} ref={headerRef} > {header} </div> )} <div className={`${bodyClassName}`} style={bodyStyle} ref={bodyRef} > {children} </div> {footer && ( <div className={`${BEM.footer}`} ref={footerRef} > {footer} </div> )} </div> ); } ColumnView.propTypes = { header: PropTypes.node, footer: PropTypes.node, flexBody: PropTypes.bool, bodyPadding: PropTypes.shape({ top: PropTypes.number, bottom: PropTypes.number, left: PropTypes.number, right: PropTypes.number, }), // eslint-disable-next-line react/forbid-prop-types headerRef: PropTypes.any, // eslint-disable-next-line react/forbid-prop-types bodyRef: PropTypes.any, // eslint-disable-next-line react/forbid-prop-types footerRef: PropTypes.any, }; ColumnView.defaultProps = { header: undefined, footer: undefined, flexBody: false, bodyPadding: { bottom: 24 }, headerRef: undefined, bodyRef: undefined, footerRef: undefined, };
iCHEF/gypcrete
packages/core/src/ColumnView.js
JavaScript
apache-2.0
2,190
// Copyright 2016 CNI authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main_test

import (
	"fmt"
	"net"
	"os/exec"
	"strings"

	"github.com/containernetworking/plugins/pkg/ns"
	"github.com/containernetworking/plugins/pkg/testutils"
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	"github.com/onsi/gomega/gbytes"
	"github.com/onsi/gomega/gexec"
)

// generateConfig builds a minimal loopback network configuration for the
// given CNI spec version, as a reader suitable for the plugin's stdin.
func generateConfig(cniVersion string) *strings.Reader {
	return strings.NewReader(fmt.Sprintf(`{ "name": "loopback-test", "cniVersion": "%s" }`, cniVersion))
}

var _ = Describe("Loopback", func() {
	var (
		networkNS ns.NetNS   // fresh network namespace per spec
		command   *exec.Cmd  // the loopback plugin binary under test
		environ   []string   // CNI_* environment passed to the plugin
	)

	BeforeEach(func() {
		command = exec.Command(pathToLoPlugin)

		var err error
		networkNS, err = testutils.NewNS()
		Expect(err).NotTo(HaveOccurred())

		// The CNI contract passes all invocation parameters via environment
		// variables; CNI_COMMAND is appended per-spec below.
		environ = []string{
			fmt.Sprintf("CNI_CONTAINERID=%s", "dummy"),
			fmt.Sprintf("CNI_NETNS=%s", networkNS.Path()),
			fmt.Sprintf("CNI_IFNAME=%s", "lo"),
			fmt.Sprintf("CNI_ARGS=%s", "none"),
			fmt.Sprintf("CNI_PATH=%s", "/some/test/path"),
		}
	})

	AfterEach(func() {
		Expect(networkNS.Close()).To(Succeed())
		Expect(testutils.UnmountNS(networkNS)).To(Succeed())
	})

	for _, ver := range testutils.AllSpecVersions {
		// Redefine ver inside for scope so real value is picked up by each dynamically defined It()
		// See Gingkgo's "Patterns for dynamically generating tests" documentation.
		ver := ver

		Context("when given a network namespace", func() {
			It(fmt.Sprintf("[%s] sets the lo device to UP", ver), func() {
				command.Stdin = generateConfig(ver)
				command.Env = append(environ, fmt.Sprintf("CNI_COMMAND=%s", "ADD"))

				// ADD must succeed and print a JSON result on stdout.
				session, err := gexec.Start(command, GinkgoWriter, GinkgoWriter)
				Expect(err).NotTo(HaveOccurred())

				Eventually(session).Should(gbytes.Say(`{.*}`))
				Eventually(session).Should(gexec.Exit(0))

				// Inspect the lo interface from inside the namespace.
				var lo *net.Interface
				err = networkNS.Do(func(ns.NetNS) error {
					var err error
					lo, err = net.InterfaceByName("lo")
					return err
				})
				Expect(err).NotTo(HaveOccurred())

				Expect(lo.Flags & net.FlagUp).To(Equal(net.FlagUp))
			})

			It(fmt.Sprintf("[%s] sets the lo device to DOWN", ver), func() {
				command.Stdin = generateConfig(ver)
				command.Env = append(environ, fmt.Sprintf("CNI_COMMAND=%s", "DEL"))

				// DEL must succeed and produce no result payload.
				session, err := gexec.Start(command, GinkgoWriter, GinkgoWriter)
				Expect(err).NotTo(HaveOccurred())

				Eventually(session).Should(gbytes.Say(``))
				Eventually(session).Should(gexec.Exit(0))

				// After DEL the lo interface must no longer be UP.
				var lo *net.Interface
				err = networkNS.Do(func(ns.NetNS) error {
					var err error
					lo, err = net.InterfaceByName("lo")
					return err
				})
				Expect(err).NotTo(HaveOccurred())

				Expect(lo.Flags & net.FlagUp).NotTo(Equal(net.FlagUp))
			})
		})
	}
})
containernetworking/plugins
plugins/main/loopback/loopback_test.go
GO
apache-2.0
3,273
--- layout: post title: "流离在过去的Css样式" subtitle: "健忘的福音" date: 2018-09-16 08:50:15 author: "AXiang" header-img: "img/post-bg-css.jpg" catalog: true tags: - 前端 - Css --- > 老忘记但是炒鸡好用的Css样式 ##### 视差效果:background-attachment: fixed; 视差滚动能让多层背景以不同的速度移动,形成立体的运动效果,带来非常出色的视觉体验。在CSS中定义背景滚动方式的属性是[backgroud-attacthment](http://www.w3school.com.cn/cssref/pr_background-attachment.asp),下面简单的写了一个Demo。 <style> .demo { margin-bottom: 30px; } .demo font,.demo-title { color: #1e70cd; margin: 40px 0 5px; text-align: center; font-size: 20px; font-weight: 300; line-height: 35px; display: block; } .demo .demo-iframe { margin: 1em auto; box-shadow: 0px 0px 1px 0px #aaa; background: #eee; position: relative; } .demo iframe { display: block; width: 100%; border: none; margin: 0; box-sizing: border-box; height: 400px; width: 1px; min-width: 100%; } </style> <div class="demo"> <font>简易视差Demo</font> <div class="demo-iframe"> <iframe frameborder="0" scrolling="yes" src="http://wangxiang.vip/practiced-parallax_image/" style="height:340px"></iframe> </div> </div> ##### 弹性盒子模型 display:flex CSS3的弹性盒提供了一种当页面需要适应不同的屏幕大小以及设备类型时确保元素拥有恰当的行为的布局方式,不需要在设置左右浮动,margin,padding来调整位置。 强烈推荐阮一峰大牛的教程:[Flex 布局教程](http://www.ruanyifeng.com/blog/2015/07/flex-grammar.html),举例几个常用的参数: 以下5个属性设置在容器上(第一个属性值均为默认值): - flex-direction (主轴的方向:row(横向从左到右) column(纵向从上到下)) - justify-content (主轴的对齐方式:flex-start(左对齐) flex-end(右对齐) center(居中) space-between(两端对齐,间隔相等) space-around(两端对齐,边距较大)) - align-items (纵向交叉轴对齐方式:stretch(占满) flex-start(起点对齐) flex-end(终点对齐) center(居中)) - align-content (多个轴线的对齐方式:stretch(轴线占满)flex-start(左对齐) flex-end(右对齐) center(居中) space-between(两端对齐,间隔相等) space-around(两端对齐,边距较大)) - flex-wrap (是否换行:nowrap(不换) wrap(换)) 以下2个属性设置在子类上(常用): - align-self (允许子类与其他不同的对齐方式,可覆盖align-items属性:与`align-items`属性完全一致) - flex-grow (定义项目的放大比例,默认为0,如果所有项目的flex-grow属性都为1,则它们将等分剩余空间(如果有的话)) ##### 单行省略 ```html overflow: hidden; text-overflow: ellipsis; white-space: nowrap; ``` 让一个自身有宽度或是容器有宽度的元素实现单行溢出省略(省略号表示) ##### 页内滚动 ```html 
overflow: auto; //使IOS下的滑轮和原生一致,丝滑般顺畅 -webkit-overflow-scrolling:touch; ``` 效果类似下图![效果图](/img/in-post/post-css/css_1711_3.png) ##### 移动端图像禁止拷贝 在移动端的webkit内核浏览器的下,可以防止用户长按图片跳出系统默认菜单(菜单有下载图片之类的功能),使用如下代码: ```css img{ -webkit-touch-callout:none; } ``` ##### 常用选择器 p:first-child 选择器:选择父级下的首个元素为`<p>`元素; p:last-child 选择器:选择父级下的最后一个为`<p>`元素; p:nth-child(2) 选择器:选择父级下的第2个为`<p>`元素; ##### calc()四则运算动态计算长宽高 用于动态计算长度值。 需要注意的是,运算符前后都需要保留一个空格,例如:width: calc(100% - 10px); 任何长度值都可以使用calc()函数进行计算; calc()函数支持 `+`, `-`, `*`, `/` 运算; calc()函数使用标准的数学运算优先级规则; 兼容到IE9IE9+、FF4.0+、Chrome19+、Safari6+,需要加上识别符,小程序完美支持。 ```css /* 宽度为1000px,下列表达式则为 950px */ .elm{ width: calc(100%-50px); } ``` ##### transform和position: fixed 如果在父级有设置`transform`属性时,容器内设有position: fixed属性将会失效,变成仅仅只是 position: absolute 的效果; Google给的解释是**position: fixed" still do not cope with transform value**
lykqnbz/lykqnbz.github.io
_posts/2018-09-16-AmazingCss.markdown
Markdown
apache-2.0
4,756
## ReplaceKeys request ### Description ReplaceKeys is a request to change the keys on an E-meter. The request needs the DeviceIdentification, an AuthenticationKey and an EncryptionKey. All requests have similar response behaviour which is described in [ResponseMessages](./ResponseMessages.md). [GetReplaceKeysResponse](GetReplaceKeysResponse.md) returns if the result is successful from the ReplaceKeys request. The response contains the DeviceIdentification and CorrelationUid which is received from the ReplaceKeys request. ### References XSD: [sm-configuration.xsd](https://github.com/OSGP/Platform/blob/development/osgp-adapter-ws-smartmetering/src/main/webapp/WEB-INF/wsdl/smartmetering/schemas/sm-configuration.xsd) WSDL: [SmartMeteringConfiguration.wsdl](https://github.com/OSGP/Platform/blob/development/osgp-adapter-ws-smartmetering/src/main/webapp/WEB-INF/wsdl/smartmetering/SmartMeteringConfiguration.wsdl)
rlemmers/rle-test-repo
Domains/Smartmetering/smartmeteringwebservices/ReplaceKeys.md
Markdown
apache-2.0
926
# Copyright (c) 2013 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Middleware that will provide Static Large Object (SLO) support. This feature is very similar to Dynamic Large Object (DLO) support in that it allows the user to upload many objects concurrently and afterwards download them as a single object. It is different in that it does not rely on eventually consistent container listings to do so. Instead, a user defined manifest of the object segments is used. ---------------------- Uploading the Manifest ---------------------- After the user has uploaded the objects to be concatenated a manifest is uploaded. The request must be a PUT with the query parameter:: ?multipart-manifest=put The body of this request will be an ordered list of files in json data format. The data to be supplied for each segment is:: path: the path to the segment (not including account) /container/object_name etag: the etag given back when the segment was PUT size_bytes: the size of the segment in bytes The format of the list will be:: json: [{"path": "/cont/object", "etag": "etagoftheobjectsegment", "size_bytes": 1048576}, ...] The number of object segments is limited to a configurable amount, default 1000. Each segment, except for the final one, must be at least 1 megabyte (configurable). On upload, the middleware will head every segment passed in and verify the size and etag of each. 
If any of the objects do not match (not found, size/etag mismatch, below minimum size) then the user will receive a 4xx error response. If everything does match, the user will receive a 2xx response and the SLO object is ready for downloading. Behind the scenes, on success, a json manifest generated from the user input is sent to object servers with an extra "X-Static-Large-Object: True" header and a modified Content-Type. The parameter: swift_bytes=$total_size will be appended to the existing Content-Type, where total_size is the sum of all the included segments' size_bytes. This extra parameter will be hidden from the user. Manifest files can reference objects in separate containers, which will improve concurrent upload speed. Objects can be referenced by multiple manifests. The segments of a SLO manifest can even be other SLO manifests. Treat them as any other object i.e., use the Etag and Content-Length given on the PUT of the sub-SLO in the manifest to the parent SLO. ------------------------- Retrieving a Large Object ------------------------- A GET request to the manifest object will return the concatenation of the objects from the manifest much like DLO. If any of the segments from the manifest are not found or their Etag/Content Length no longer match the connection will drop. In this case a 409 Conflict will be logged in the proxy logs and the user will receive incomplete results. The headers from this GET or HEAD request will return the metadata attached to the manifest object itself with some exceptions:: Content-Length: the total size of the SLO (the sum of the sizes of the segments in the manifest) X-Static-Large-Object: True Etag: the etag of the SLO (generated the same way as DLO) A GET request with the query parameter:: ?multipart-manifest=get Will return the actual manifest file itself. This is generated json and does not match the data sent from the original multipart-manifest=put. This call's main purpose is for debugging. 
When the manifest object is uploaded you are more or less guaranteed that every segment in the manifest exists and matched the specifications. However, there is nothing that prevents the user from breaking the SLO download by deleting/replacing a segment referenced in the manifest. It is left to the user use caution in handling the segments. ----------------------- Deleting a Large Object ----------------------- A DELETE request will just delete the manifest object itself. A DELETE with a query parameter:: ?multipart-manifest=delete will delete all the segments referenced in the manifest and then the manifest itself. The failure response will be similar to the bulk delete middleware. ------------------------ Modifying a Large Object ------------------------ PUTs / POSTs will work as expected, PUTs will just overwrite the manifest object for example. ------------------ Container Listings ------------------ In a container listing the size listed for SLO manifest objects will be the total_size of the concatenated segments in the manifest. The overall X-Container-Bytes-Used for the container (and subsequently for the account) will not reflect total_size of the manifest but the actual size of the json data stored. The reason for this somewhat confusing discrepancy is we want the container listing to reflect the size of the manifest object when it is downloaded. We do not, however, want to count the bytes-used twice (for both the manifest and the segments it's referring to) in the container and account metadata which can be used for stats purposes. 
""" from urllib import quote from cStringIO import StringIO from datetime import datetime import mimetypes from hashlib import md5 from swift.common.swob import Request, HTTPBadRequest, HTTPServerError, \ HTTPMethodNotAllowed, HTTPRequestEntityTooLarge, HTTPLengthRequired, \ HTTPOk, HTTPPreconditionFailed, HTTPException, HTTPNotFound, \ HTTPUnauthorized from swift.common.utils import json, get_logger, config_true_value from swift.common.constraints import check_utf8, MAX_BUFFERED_SLO_SEGMENTS from swift.common.http import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED from swift.common.wsgi import WSGIContext from swift.common.middleware.bulk import get_response_body, \ ACCEPTABLE_FORMATS, Bulk def parse_input(raw_data): """ Given a request will parse the body and return a list of dictionaries :raises: HTTPException on parse errors :returns: a list of dictionaries on success """ try: parsed_data = json.loads(raw_data) except ValueError: raise HTTPBadRequest("Manifest must be valid json.") req_keys = set(['path', 'etag', 'size_bytes']) try: for seg_dict in parsed_data: if (set(seg_dict) != req_keys or '/' not in seg_dict['path'].lstrip('/')): raise HTTPBadRequest('Invalid SLO Manifest File') except (AttributeError, TypeError): raise HTTPBadRequest('Invalid SLO Manifest File') return parsed_data class SloContext(WSGIContext): def __init__(self, slo, slo_etag): WSGIContext.__init__(self, slo.app) self.slo_etag = '"' + slo_etag.hexdigest() + '"' def handle_slo_put(self, req, start_response): app_resp = self._app_call(req.environ) for i in xrange(len(self._response_headers)): if self._response_headers[i][0].lower() == 'etag': self._response_headers[i] = ('Etag', self.slo_etag) break start_response(self._response_status, self._response_headers, self._response_exc_info) return app_resp class StaticLargeObject(object): """ StaticLargeObject Middleware See above for a full description. The proxy logs created for any subrequests made will have swift.source set to "SLO". 
:param app: The next WSGI filter or app in the paste.deploy chain. :param conf: The configuration dict for the middleware. """ def __init__(self, app, conf): self.conf = conf self.app = app self.logger = get_logger(conf, log_route='slo') self.max_manifest_segments = int(self.conf.get('max_manifest_segments', 1000)) self.max_manifest_size = int(self.conf.get('max_manifest_size', 1024 * 1024 * 2)) self.min_segment_size = int(self.conf.get('min_segment_size', 1024 * 1024)) self.bulk_deleter = Bulk(app, {}) def handle_multipart_put(self, req, start_response): """ Will handle the PUT of a SLO manifest. Heads every object in manifest to check if is valid and if so will save a manifest generated from the user input. Uses WSGIContext to call self.app and start_response and returns a WSGI iterator. :params req: a swob.Request with an obj in path :raises: HttpException on errors """ try: vrs, account, container, obj = req.split_path(1, 4, True) except ValueError: return self.app(req.environ, start_response) if req.content_length > self.max_manifest_size: raise HTTPRequestEntityTooLarge( "Manifest File > %d bytes" % self.max_manifest_size) if req.headers.get('X-Copy-From'): raise HTTPMethodNotAllowed( 'Multipart Manifest PUTs cannot be Copy requests') if req.content_length is None and \ req.headers.get('transfer-encoding', '').lower() != 'chunked': raise HTTPLengthRequired(request=req) parsed_data = parse_input(req.body_file.read(self.max_manifest_size)) problem_segments = [] if len(parsed_data) > self.max_manifest_segments: raise HTTPRequestEntityTooLarge( 'Number segments must be <= %d' % self.max_manifest_segments) total_size = 0 out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS) if not out_content_type: out_content_type = 'text/plain' data_for_storage = [] slo_etag = md5() for index, seg_dict in enumerate(parsed_data): obj_name = seg_dict['path'] if isinstance(obj_name, unicode): obj_name = obj_name.encode('utf-8') obj_path = '/'.join(['', vrs, account, 
obj_name.lstrip('/')]) try: seg_size = int(seg_dict['size_bytes']) except (ValueError, TypeError): raise HTTPBadRequest('Invalid Manifest File') if seg_size < self.min_segment_size and \ (index == 0 or index < len(parsed_data) - 1): raise HTTPBadRequest( 'Each segment, except the last, must be larger than ' '%d bytes.' % self.min_segment_size) new_env = req.environ.copy() new_env['PATH_INFO'] = obj_path new_env['REQUEST_METHOD'] = 'HEAD' new_env['swift.source'] = 'SLO' del(new_env['wsgi.input']) del(new_env['QUERY_STRING']) new_env['CONTENT_LENGTH'] = 0 new_env['HTTP_USER_AGENT'] = \ '%s MultipartPUT' % req.environ.get('HTTP_USER_AGENT') headers = {'fingerprint':seg_dict['etag']} head_seg_resp = \ Request.blank(obj_path, headers=headers, environ=new_env).get_response(self.app) if head_seg_resp.is_success: total_size += seg_size if seg_size != head_seg_resp.content_length: problem_segments.append([quote(obj_name), 'Size Mismatch']) if seg_dict['etag'] == head_seg_resp.etag: slo_etag.update(seg_dict['etag']) else: problem_segments.append([quote(obj_name), 'Etag Mismatch']) if head_seg_resp.last_modified: last_modified = head_seg_resp.last_modified else: # shouldn't happen last_modified = datetime.now() last_modified_formatted = \ last_modified.strftime('%Y-%m-%dT%H:%M:%S.%f') seg_data = {'name': '/' + seg_dict['path'].lstrip('/'), 'bytes': seg_size, 'hash': seg_dict['etag'], 'content_type': head_seg_resp.content_type, 'last_modified': last_modified_formatted} if config_true_value( head_seg_resp.headers.get('X-Static-Large-Object')): seg_data['sub_slo'] = True data_for_storage.append(seg_data) else: problem_segments.append([quote(obj_name), head_seg_resp.status]) if problem_segments: resp_body = get_response_body( out_content_type, {}, problem_segments) raise HTTPBadRequest(resp_body, content_type=out_content_type) env = req.environ if not env.get('CONTENT_TYPE'): guessed_type, _junk = mimetypes.guess_type(req.path_info) env['CONTENT_TYPE'] = guessed_type or 
'application/octet-stream' env['swift.content_type_overriden'] = True env['CONTENT_TYPE'] += ";swift_bytes=%d" % total_size env['HTTP_X_STATIC_LARGE_OBJECT'] = 'True' json_data = json.dumps(data_for_storage) env['CONTENT_LENGTH'] = str(len(json_data)) env['wsgi.input'] = StringIO(json_data) slo_context = SloContext(self, slo_etag) return slo_context.handle_slo_put(req, start_response) def get_segments_to_delete_iter(self, req): """ A generator function to be used to delete all the segments and sub-segments referenced in a manifest. :params req: a swob.Request with an SLO manifest in path :raises HTTPPreconditionFailed: on invalid UTF8 in request path :raises HTTPBadRequest: on too many buffered sub segments and on invalid SLO manifest path """ if not check_utf8(req.path_info): raise HTTPPreconditionFailed( request=req, body='Invalid UTF8 or contains NULL') try: vrs, account, container, obj = req.split_path(4, 4, True) except ValueError: raise HTTPBadRequest('Invalid SLO manifiest path') segments = [{ 'sub_slo': True, 'name': ('/%s/%s' % (container, obj)).decode('utf-8')}] while segments: if len(segments) > MAX_BUFFERED_SLO_SEGMENTS: raise HTTPBadRequest( 'Too many buffered slo segments to delete.') seg_data = segments.pop(0) if seg_data.get('sub_slo'): try: segments.extend( self.get_slo_segments(seg_data['name'], req)) except HTTPException as err: # allow bulk delete response to report errors seg_data['error'] = {'code': err.status_int, 'message': err.body} # add manifest back to be deleted after segments seg_data['sub_slo'] = False segments.append(seg_data) else: seg_data['name'] = seg_data['name'].encode('utf-8') yield seg_data def get_slo_segments(self, obj_name, req): """ Performs a swob.Request and returns the SLO manifest's segments. :raises HTTPServerError: on unable to load obj_name or on unable to load the SLO manifest data. 
:raises HTTPBadRequest: on not an SLO manifest :raises HTTPNotFound: on SLO manifest not found :returns: SLO manifest's segments """ vrs, account, _junk = req.split_path(2, 3, True) new_env = req.environ.copy() new_env['REQUEST_METHOD'] = 'GET' del(new_env['wsgi.input']) new_env['QUERY_STRING'] = 'multipart-manifest=get' new_env['CONTENT_LENGTH'] = 0 new_env['HTTP_USER_AGENT'] = \ '%s MultipartDELETE' % new_env.get('HTTP_USER_AGENT') new_env['swift.source'] = 'SLO' new_env['PATH_INFO'] = ( '/%s/%s/%s' % ( vrs, account, obj_name.lstrip('/'))).encode('utf-8') resp = Request.blank('', new_env).get_response(self.app) if resp.is_success: if config_true_value(resp.headers.get('X-Static-Large-Object')): try: return json.loads(resp.body) except ValueError: raise HTTPServerError('Unable to load SLO manifest') else: raise HTTPBadRequest('Not an SLO manifest') elif resp.status_int == HTTP_NOT_FOUND: raise HTTPNotFound('SLO manifest not found') elif resp.status_int == HTTP_UNAUTHORIZED: raise HTTPUnauthorized('401 Unauthorized') else: raise HTTPServerError('Unable to load SLO manifest or segment.') def handle_multipart_delete(self, req): """ Will delete all the segments in the SLO manifest and then, if successful, will delete the manifest file. 
:params req: a swob.Request with an obj in path :returns: swob.Response whose app_iter set to Bulk.handle_delete_iter """ resp = HTTPOk(request=req) out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS) if out_content_type: resp.content_type = out_content_type resp.app_iter = self.bulk_deleter.handle_delete_iter( req, objs_to_delete=self.get_segments_to_delete_iter(req), user_agent='MultipartDELETE', swift_source='SLO', out_content_type=out_content_type) return resp def __call__(self, env, start_response): """ WSGI entry point """ req = Request(env) try: vrs, account, container, obj = req.split_path(1, 4, True) except ValueError: return self.app(env, start_response) try: if obj: if req.method == 'PUT' and \ req.params.get('multipart-manifest') == 'put': return self.handle_multipart_put(req, start_response) if req.method == 'DELETE' and \ req.params.get('multipart-manifest') == 'delete': return self.handle_multipart_delete(req)(env, start_response) if 'X-Static-Large-Object' in req.headers: raise HTTPBadRequest( request=req, body='X-Static-Large-Object is a reserved header. ' 'To create a static large object add query param ' 'multipart-manifest=put.') except HTTPException as err_resp: return err_resp(env, start_response) return self.app(env, start_response) def filter_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) def slo_filter(app): return StaticLargeObject(app, conf) return slo_filter
lielongxingkong/windchimes
swift/common/middleware/slo.py
Python
apache-2.0
19,534
# Embellisia hyacinthi de Hoog & P.J. Mull. bis, 1973 SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in Neth. Jl Pl. Path. 79(3): 85 (1973) #### Original name Embellisia hyacinthi de Hoog & P.J. Mull. bis, 1973 ### Remarks null
mdoering/backbone
life/Fungi/Ascomycota/Dothideomycetes/Pleosporales/Pleosporaceae/Embellisia/Embellisia hyacinthi/README.md
Markdown
apache-2.0
287
# Nyctalis lycoperdoides (Bull.) Konrad & Maubl. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name Agaricus lycoperdoides Bull., 1784 ### Remarks null
mdoering/backbone
life/Fungi/Basidiomycota/Agaricomycetes/Agaricales/Lyophyllaceae/Asterophora/Asterophora lycoperdoides/ Syn. Nyctalis lycoperdoides/README.md
Markdown
apache-2.0
233
# Dentiscutata reticulata (Koske, D.D. Mill. & C. Walker) Sieverd., F.A. Souza & Oehl, 2008 SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Fungi/Glomeromycota/Glomeromycetes/Diversisporales/Gigasporaceae/Scutellospora/Scutellospora reticulata/ Syn. Dentiscutata reticulata/README.md
Markdown
apache-2.0
246
# Usnea splendida Motyka SPECIES #### Status ACCEPTED #### According to Index Fungorum #### Published in null #### Original name Usnea splendida Motyka ### Remarks null
mdoering/backbone
life/Fungi/Ascomycota/Lecanoromycetes/Lecanorales/Parmeliaceae/Usnea/Usnea splendida/README.md
Markdown
apache-2.0
173
# Habenaria caranjensis Dalzell SPECIES #### Status ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Habenaria/Habenaria caranjensis/README.md
Markdown
apache-2.0
187
# Xiphion sisyrinchium (L.) Baker SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Iridaceae/Moraea/Moraea sisyrinchium/ Syn. Xiphion sisyrinchium/README.md
Markdown
apache-2.0
188
# Hieracium bifidum subsp. stenolepidotropum K. Malý & Zahn SUBSPECIES #### Status ACCEPTED #### According to Euro+Med Plantbase #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Hieracium/Hieracium bifidum/Hieracium bifidum stenolepidotropum/README.md
Markdown
apache-2.0
198
# Aremoricanium decoratum Loeblich & MacAdam, 1971 SPECIES #### Status ACCEPTED #### According to Interim Register of Marine and Nonmarine Genera #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Protozoa/Acritarcha/Aremoricanium/Aremoricanium decoratum/README.md
Markdown
apache-2.0
214
# Aganonerion Pierre & Spire GENUS #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Apocynaceae/Aganonerion/README.md
Markdown
apache-2.0
174
# Mascagnia benthamiana (Griseb.) W.R.Anderson SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Malpighiales/Malpighiaceae/Mascagnia/Mascagnia benthamiana/README.md
Markdown
apache-2.0
194
import { EventEmitter } from 'events' import { applyReducer, Operation } from 'fast-json-patch' import { camelCase, omit } from 'lodash' import { FORMAT_TEXT_MAP, SpanContext, Tracer } from 'opentracing' import { Observable, Subscription, Symbol } from 'rxjs' import { inspect } from 'util' import { ErrorCodes, Message, StreamMessageReader as VSCodeStreamMessageReader, StreamMessageWriter as VSCodeStreamMessageWriter, } from 'vscode-jsonrpc' import { isNotificationMessage, isRequestMessage, isResponseMessage, NotificationMessage, RequestMessage, ResponseMessage, } from 'vscode-jsonrpc/lib/messages' import { Logger, NoopLogger } from './logging' import { InitializeParams, PartialResultParams } from './request-type' import { TypeScriptService } from './typescript-service' /** * Interface for JSON RPC messages with tracing metadata */ export interface HasMeta { meta: { [key: string]: any } } /** * Returns true if the passed argument has a meta field */ function hasMeta(candidate: any): candidate is HasMeta { return ( typeof candidate === 'object' && candidate !== null && typeof candidate.meta === 'object' && candidate.meta !== null ) } /** * Returns true if the passed argument is an object with a `.then()` method */ function isPromiseLike(candidate: any): candidate is PromiseLike<any> { return typeof candidate === 'object' && candidate !== null && typeof candidate.then === 'function' } /** * Returns true if the passed argument is an object with a `[Symbol.observable]` method */ function isObservable(candidate: any): candidate is Observable<any> { return typeof candidate === 'object' && candidate !== null && typeof candidate[Symbol.observable] === 'function' } export interface MessageLogOptions { /** Logger to use */ logger?: Logger /** Whether to log all messages */ logMessages?: boolean } /** * Takes a NodeJS ReadableStream and emits parsed messages received on the stream. 
* In opposite to StreamMessageReader, supports multiple listeners and is compatible with Observables */ export class MessageEmitter extends EventEmitter { constructor(input: NodeJS.ReadableStream, options: MessageLogOptions = {}) { super() const reader = new VSCodeStreamMessageReader(input) // Forward events reader.listen(msg => { this.emit('message', msg) }) reader.onError(err => { this.emit('error', err) }) reader.onClose(() => { this.emit('close') }) this.setMaxListeners(Infinity) // Register message listener to log messages if configured if (options.logMessages && options.logger) { const logger = options.logger this.on('message', message => { logger.log('-->', message) }) } } /** Emitted when a new JSON RPC message was received on the input stream */ public on(event: 'message', listener: (message: Message) => void): this /** Emitted when the underlying input stream emitted an error */ public on(event: 'error', listener: (error: Error) => void): this /** Emitted when the underlying input stream was closed */ public on(event: 'close', listener: () => void): this /* istanbul ignore next */ public on(event: string, listener: (arg?: any) => void): this { return super.on(event, listener) } /** Emitted when a new JSON RPC message was received on the input stream */ public once(event: 'message', listener: (message: Message) => void): this /** Emitted when the underlying input stream emitted an error */ public once(event: 'error', listener: (error: Error) => void): this /** Emitted when the underlying input stream was closed */ public once(event: 'close', listener: () => void): this /* istanbul ignore next */ public once(event: string, listener: (arg?: any) => void): this { return super.on(event, listener) } } /** * Wraps vscode-jsonrpcs StreamMessageWriter to support logging messages, * decouple our code from the vscode-jsonrpc module and provide a more * consistent event API */ export class MessageWriter { private logger: Logger private logMessages: boolean private 
vscodeWriter: VSCodeStreamMessageWriter /** * @param output The output stream to write to (e.g. STDOUT or a socket) * @param options */ constructor(output: NodeJS.WritableStream, options: MessageLogOptions = {}) { this.vscodeWriter = new VSCodeStreamMessageWriter(output) this.logger = options.logger || new NoopLogger() this.logMessages = !!options.logMessages } /** * Writes a JSON RPC message to the output stream. * Logs it if configured * * @param message A complete JSON RPC message object */ public write(message: RequestMessage | NotificationMessage | ResponseMessage): void { if (this.logMessages) { this.logger.log('<--', message) } this.vscodeWriter.write(message) } } export interface RegisterLanguageHandlerOptions { logger?: Logger /** An opentracing-compatible tracer */ tracer?: Tracer } /** * Registers all method implementations of a LanguageHandler on a connection * * @param messageEmitter MessageEmitter to listen on * @param messageWriter MessageWriter to write to * @param handler TypeScriptService object that contains methods for all methods to be handled */ export function registerLanguageHandler( messageEmitter: MessageEmitter, messageWriter: MessageWriter, handler: TypeScriptService, options: RegisterLanguageHandlerOptions = {} ): void { const logger = options.logger || new NoopLogger() const tracer = options.tracer || new Tracer() /** Tracks Subscriptions for results to unsubscribe them on $/cancelRequest */ const subscriptions = new Map<string | number, Subscription>() /** * Whether the handler is in an initialized state. * `initialize` sets this to true, `shutdown` to false. 
* Used to determine whether a manual `shutdown` call is needed on error/close */ let initialized = false /** Whether the client supports streaming with $/partialResult */ let streaming = false messageEmitter.on('message', async message => { // Ignore responses if (isResponseMessage(message)) { return } if (!isRequestMessage(message) && !isNotificationMessage(message)) { logger.error('Received invalid message:', message) return } switch (message.method) { case 'initialize': initialized = true streaming = !!(message.params as InitializeParams).capabilities.streaming break case 'shutdown': initialized = false break case 'exit': // Ignore exit notification, it's not the responsibility of the TypeScriptService to handle it, // but the TCP / STDIO server which needs to close the socket or kill the process for (const subscription of subscriptions.values()) { subscription.unsubscribe() } return case '$/cancelRequest': // Cancel another request by unsubscribing from the Observable const subscription = subscriptions.get(message.params.id) if (!subscription) { logger.warn(`$/cancelRequest for unknown request ID ${message.params.id}`) return } subscription.unsubscribe() subscriptions.delete(message.params.id) messageWriter.write({ jsonrpc: '2.0', id: message.params.id, error: { message: 'Request cancelled', code: ErrorCodes.RequestCancelled, }, }) return } const method = camelCase(message.method) let context: SpanContext | undefined // If message is request and has tracing metadata, extract the span context if (isRequestMessage(message) && hasMeta(message)) { context = tracer.extract(FORMAT_TEXT_MAP, message.meta) || undefined } const span = tracer.startSpan('Handle ' + message.method, { childOf: context }) span.setTag('params', inspect(message.params)) if (typeof (handler as any)[method] !== 'function') { // Method not implemented if (isRequestMessage(message)) { messageWriter.write({ jsonrpc: '2.0', id: message.id, error: { code: ErrorCodes.MethodNotFound, message: `Method 
${method} not implemented`, }, }) } else { logger.warn(`Method ${method} not implemented`) } return } // Call handler method with params and span let observable: Observable<Operation> try { // Convert return value to Observable const returnValue = (handler as any)[method](message.params, span) if (isObservable(returnValue)) { observable = returnValue } else if (isPromiseLike(returnValue)) { observable = Observable.from(returnValue) } else { observable = Observable.of(returnValue) } } catch (err) { observable = Observable.throw(err) } if (isRequestMessage(message)) { const subscription = observable .do(patch => { if (streaming) { span.log({ event: 'partialResult', patch }) // Send $/partialResult for partial result patches only if client supports it messageWriter.write({ jsonrpc: '2.0', method: '$/partialResult', params: { id: message.id, patch: [patch], } as PartialResultParams, }) } }) // Build up final result for BC // TODO send null if client declared streaming capability .reduce<Operation, any>(applyReducer, null) .finally(() => { // Finish span span.finish() // Delete subscription from Map // Make sure to not run this before subscription.set() was called // (in case the Observable is synchronous) process.nextTick(() => { subscriptions.delete(message.id) }) }) .subscribe( result => { // Send final result messageWriter.write({ jsonrpc: '2.0', id: message.id, result, }) }, err => { // Set error on span span.setTag('error', true) span.log({ event: 'error', 'error.object': err, message: err.message, stack: err.stack }) // Log error logger.error(`Handler for ${message.method} failed:`, err, '\nMessage:', message) // Send error response messageWriter.write({ jsonrpc: '2.0', id: message.id, error: { message: err.message + '', code: typeof err.code === 'number' ? 
err.code : ErrorCodes.UnknownErrorCode, data: omit(err, ['message', 'code']), }, }) } ) // Save subscription for $/cancelRequest subscriptions.set(message.id, subscription) } else { // For notifications, still subscribe and log potential error observable.subscribe(undefined, err => { logger.error(`Handle ${method}:`, err) }) } }) // On stream close, shutdown handler if it was initialized messageEmitter.once('close', () => { // Cancel all outstanding requests for (const subscription of subscriptions.values()) { subscription.unsubscribe() } if (initialized) { initialized = false logger.error('Stream was closed without shutdown notification') handler.shutdown() } }) // On stream error, shutdown handler if it was initialized messageEmitter.once('error', err => { // Cancel all outstanding requests for (const subscription of subscriptions.values()) { subscription.unsubscribe() } if (initialized) { initialized = false logger.error('Stream:', err) handler.shutdown() } }) }
sourcegraph/javascript-typescript-langserver
src/connection.ts
TypeScript
apache-2.0
13,639
package sdk.java.src.org.opencv.core; import java.util.Arrays; import java.util.List; public class MatOfRect extends Mat { // 32SC4 private static final int _depth = CvType.CV_32S; private static final int _channels = 4; public MatOfRect() { super(); } protected MatOfRect(long addr) { super(addr); if (!empty() && checkVector(_channels, _depth) < 0) throw new IllegalArgumentException("Incompatible Mat"); //FIXME: do we need release() here? } public static MatOfRect fromNativeAddr(long addr) { return new MatOfRect(addr); } public MatOfRect(Mat m) { super(m, Range.all()); if (!empty() && checkVector(_channels, _depth) < 0) throw new IllegalArgumentException("Incompatible Mat"); //FIXME: do we need release() here? } public MatOfRect(Rect... a) { super(); fromArray(a); } public void alloc(int elemNumber) { if (elemNumber > 0) super.create(elemNumber, 1, CvType.makeType(_depth, _channels)); } public void fromArray(Rect... a) { if (a == null || a.length == 0) return; int num = a.length; alloc(num); int buff[] = new int[num * _channels]; for (int i = 0; i < num; i++) { Rect r = a[i]; buff[_channels * i + 0] = (int) r.x; buff[_channels * i + 1] = (int) r.y; buff[_channels * i + 2] = (int) r.width; buff[_channels * i + 3] = (int) r.height; } put(0, 0, buff); //TODO: check ret val! } public Rect[] toArray() { int num = (int) total(); Rect[] a = new Rect[num]; if (num == 0) return a; int buff[] = new int[num * _channels]; get(0, 0, buff); //TODO: check ret val! for (int i = 0; i < num; i++) a[i] = new Rect(buff[i * _channels], buff[i * _channels + 1], buff[i * _channels + 2], buff[i * _channels + 3]); return a; } public void fromList(List<Rect> lr) { Rect ap[] = lr.toArray(new Rect[0]); fromArray(ap); } public List<Rect> toList() { Rect[] ar = toArray(); return Arrays.asList(ar); } }
dilloncc/podchief-android
scanlibrary/src/main/jni/sdk/java/src/org/opencv/core/MatOfRect.java
Java
apache-2.0
2,273
# Copyright 2014 Open Connectome Project (http;//openconnecto.me) # Written by Da Zheng (zhengda1936@gmail.com) # # This file is part of SAFSlib. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. include ../Makefile.common CXXFLAGS += -I.. -I../include -I../libcommon CFLAGS = -g -O3 -I.. -I../include -I../libcommon $(TRACE_FLAGS) LDFLAGS := -L../libsafs -lsafs -L../libcommon -lcommon $(LDFLAGS) LIBFILE = ../libsafs/libsafs.a ../libcommon/libcommon.a TARGETS = SAFS-util all: $(TARGETS) SAFS-util: SAFS-util.o $(LIBFILE) $(CXX) -o SAFS-util SAFS-util.o $(LDFLAGS) clean: rm -f *.o rm -f *~ rm -f *.d rm -f $(TARGETS)
silky/FlashGraph
utils/Makefile
Makefile
apache-2.0
1,127
/* main.c - Application main entry point */ /* * Copyright (c) 2016 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <stdint.h> #include <stddef.h> #include <string.h> #include <errno.h> #include <misc/printk.h> #include <misc/byteorder.h> #include <zephyr.h> #include <bluetooth/bluetooth.h> #include <bluetooth/hci.h> #include <bluetooth/conn.h> #include <bluetooth/uuid.h> #include <bluetooth/gatt.h> #include <gatt/gap.h> #include <gatt/dis.h> #include <gatt/bas.h> #include <gatt/hog.h> #define DEVICE_NAME "Test HoG mouse" #define DEVICE_NAME_LEN (sizeof(DEVICE_NAME) - 1) #define HOG_APPEARANCE 0x03c2 static const struct bt_data ad[] = { BT_DATA_BYTES(BT_DATA_FLAGS, (BT_LE_AD_GENERAL | BT_LE_AD_NO_BREDR)), BT_DATA_BYTES(BT_DATA_UUID16_ALL, 0x12, 0x18, /* HID Service */ 0x0f, 0x18), /* Battery Service */ }; static const struct bt_data sd[] = { BT_DATA(BT_DATA_NAME_COMPLETE, DEVICE_NAME, DEVICE_NAME_LEN), }; static void connected(struct bt_conn *conn, uint8_t err) { char addr[BT_ADDR_LE_STR_LEN]; bt_addr_le_to_str(bt_conn_get_dst(conn), addr, sizeof(addr)); if (err) { printk("Failed to connect to %s (%u)\n", addr, err); return; } printk("Connected %s\n", addr); if (bt_conn_security(conn, BT_SECURITY_MEDIUM)) { printk("Failed to set security\n"); } } static void disconnected(struct bt_conn *conn, uint8_t reason) { char addr[BT_ADDR_LE_STR_LEN]; bt_addr_le_to_str(bt_conn_get_dst(conn), addr, sizeof(addr)); printk("Disconnected from %s (reason %u)\n", addr, 
reason); } static void security_changed(struct bt_conn *conn, bt_security_t level) { char addr[BT_ADDR_LE_STR_LEN]; bt_addr_le_to_str(bt_conn_get_dst(conn), addr, sizeof(addr)); printk("Security changed: %s level %u\n", addr, level); } static struct bt_conn_cb conn_callbacks = { .connected = connected, .disconnected = disconnected, .security_changed = security_changed, }; static void bt_ready(int err) { if (err) { printk("Bluetooth init failed (err %d)\n", err); return; } printk("Bluetooth initialized\n"); gap_init(DEVICE_NAME, HOG_APPEARANCE); bas_init(); dis_init(CONFIG_SOC, "Manufacturer"); hog_init(); err = bt_le_adv_start(BT_LE_ADV_CONN, ad, ARRAY_SIZE(ad), sd, ARRAY_SIZE(sd)); if (err) { printk("Advertising failed to start (err %d)\n", err); return; } printk("Advertising successfully started\n"); } static void auth_passkey_display(struct bt_conn *conn, unsigned int passkey) { char addr[BT_ADDR_LE_STR_LEN]; bt_addr_le_to_str(bt_conn_get_dst(conn), addr, sizeof(addr)); printk("Passkey for %s: %u\n", addr, passkey); } static void auth_cancel(struct bt_conn *conn) { char addr[BT_ADDR_LE_STR_LEN]; bt_addr_le_to_str(bt_conn_get_dst(conn), addr, sizeof(addr)); printk("Pairing cancelled: %s\n", addr); } static struct bt_conn_auth_cb auth_cb_display = { .passkey_display = auth_passkey_display, .passkey_entry = NULL, .cancel = auth_cancel, }; void main(void) { int err; err = bt_enable(bt_ready); if (err) { printk("Bluetooth init failed (err %d)\n", err); return; } bt_conn_cb_register(&conn_callbacks); bt_conn_auth_cb_register(&auth_cb_display); }
Jason0204/jasontek_f103rb-zephyrOS-project
samples/bluetooth/peripheral_hids/src/main.c
C
apache-2.0
3,712
package com.demo.processor; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.demo.demorules.BrmsInsertElement; import com.demo.demorules.BrmsPayload; import com.demo.demorules.FireRulesObject; import com.demo.demorules.InsertElementWrapper; import com.demo.demorules.PsiData; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonObject; import com.google.gson.JsonParser; public class BrmsProcessor implements Processor { Logger log=LoggerFactory.getLogger(getClass()); @Override public void process(Exchange exchg) throws Exception { String output=new String((byte[])exchg.getIn().getBody()); log.info(output); JsonParser parser=new JsonParser(); JsonObject value=parser.parse(output).getAsJsonObject(); log.info(value.get("psi").getAsString()); int reading=Integer.valueOf(value.get("psi").getAsString()); Gson gson=new GsonBuilder().setPrettyPrinting().create(); BrmsInsertElement insert=new BrmsInsertElement(); InsertElementWrapper insertElementWrapper=new InsertElementWrapper(); insertElementWrapper.setInsert(insert); FireRulesObject fireRulesObject=new FireRulesObject(); PsiData psi=new PsiData(); psi.setReading(reading); insert.getObject().setPsiDate(psi); BrmsPayload brms=new BrmsPayload(insertElementWrapper,fireRulesObject); String payload=new Gson().toJson(brms); log.info(payload); exchg.getIn().setBody(payload); } }
wohshon/fis-integration
src/main/java/com/demo/processor/BrmsProcessor.java
Java
apache-2.0
1,511
package me.toptas.jobseasy.app; import java.util.HashMap; import java.util.List; import javax.inject.Inject; import me.toptas.jobseasy.model.RssItem; public class SessionData { @Inject public SessionData() { } private final HashMap<String, List<RssItem>> mContentMap = new HashMap<>(); public boolean hasUrl(String url) { return mContentMap.containsKey(url); } public void addContent(String url, List<RssItem> items) { mContentMap.put(url, items); } public List<RssItem> getContent(String url) { return mContentMap.get(url); } }
bablumon/EasyJobsMain
app/src/main/java/me/toptas/jobseasy/app/SessionData.java
Java
apache-2.0
601
// // WMCommentViewController.h // 百思不得姐 // // Created by 王蒙 on 15/8/2. // Copyright (c) 2015年 wm. All rights reserved. // #import <UIKit/UIKit.h> @class WMWordToip; @interface WMCommentViewController : UIViewController /**帖子模型*/ @property (nonatomic,strong) WMWordToip *wordToip; @end
sdhzwm/Baijie-OC-
百思不得姐/百思不得姐/Classes/Essence(精华)/Controller/WMCommentViewController.h
C
apache-2.0
315
/////////////////////////////////////////////////////////////////////////////// // // Copyright (c) Crossbar.io Technologies GmbH and contributors // // Boost Software License - Version 1.0 - August 17th, 2003 // // Permission is hereby granted, free of charge, to any person or organization // obtaining a copy of the software and accompanying documentation covered by // this license (the "Software") to use, reproduce, display, distribute, // execute, and transmit the Software, and to prepare derivative works of the // Software, and to permit third-parties to whom the Software is furnished to // do so, all subject to the following: // // The copyright notices in the Software and this entire statement, including // the above license grant, this restriction and the following disclaimer, // must be included in all copies of the Software, in whole or in part, and // all derivative works of the Software, unless such copies or derivative // works are solely in the form of machine-executable object code generated by // a source language processor. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT // SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE // FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, // ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. 
// /////////////////////////////////////////////////////////////////////////////// #include "parameters.hpp" #include <cstdlib> #include <boost/asio/ip/address.hpp> #include <boost/program_options.hpp> #include <iostream> namespace { const std::string LOCALHOST_IP_ADDRESS_STRING("127.0.0.1"); const boost::asio::ip::address LOCALHOST_IP_ADDRESS( boost::asio::ip::address::from_string(LOCALHOST_IP_ADDRESS_STRING)); const std::string DEFAULT_REALM("realm1"); const uint16_t DEFAULT_RAWSOCKET_PORT(8000); const std::string DEFAULT_UDS_PATH("/tmp/crossbar.sock"); } parameters::parameters() : m_debug(false) , m_realm(DEFAULT_REALM) , m_rawsocket_endpoint(LOCALHOST_IP_ADDRESS, DEFAULT_RAWSOCKET_PORT) #ifdef BOOST_ASIO_HAS_LOCAL_SOCKETS , m_uds_endpoint(DEFAULT_UDS_PATH) #endif { } bool parameters::debug() const { return m_debug; } const std::string& parameters::realm() const { return m_realm; } const boost::asio::ip::tcp::endpoint& parameters::rawsocket_endpoint() const { return m_rawsocket_endpoint; } #ifdef BOOST_ASIO_HAS_LOCAL_SOCKETS const boost::asio::local::stream_protocol::endpoint& parameters::uds_endpoint() const { return m_uds_endpoint; } #endif void parameters::set_debug(bool value) { m_debug = value; } void parameters::set_realm(const std::string& realm) { m_realm = realm; } void parameters::set_rawsocket_endpoint(const std::string& ip_address, uint16_t port) { m_rawsocket_endpoint = boost::asio::ip::tcp::endpoint( boost::asio::ip::address::from_string(ip_address), port); } #ifdef BOOST_ASIO_HAS_LOCAL_SOCKETS void parameters::set_uds_endpoint(const std::string& path) { m_uds_endpoint = boost::asio::local::stream_protocol::endpoint(path); } #endif std::unique_ptr<parameters> get_parameters(int argc, char** argv) { std::unique_ptr<parameters> params(new parameters); namespace po = boost::program_options; po::options_description description("options"); description.add_options() ("help", "Display this help message") ("debug,d", 
po::bool_switch()->default_value(false), "Enable debug logging.") ("realm,r", po::value<std::string>()->default_value(DEFAULT_REALM), "The realm to join on the wamp router.") ("uds-path,u", po::value<std::string>()->default_value(DEFAULT_UDS_PATH), "The unix domain socket path the wamp router is listening for connections on.") ("rawsocket-ip,h", po::value<std::string>()->default_value(LOCALHOST_IP_ADDRESS_STRING), "The ip address of the host running the wamp router.") ("rawsocket-port,p", po::value<uint16_t>()->default_value(DEFAULT_RAWSOCKET_PORT), "The port that the wamp router is listening for connections on."); po::variables_map variables; try { po::store(po::parse_command_line(argc, argv, description), variables); if (variables.count("help")) { std::cout << "Example Parameters" << std::endl << description << std::endl; exit(0); } po::notify(variables); } catch(po::error& e) { std::cerr << "error: " << e.what() << std::endl << std::endl; std::cerr << description << std::endl; exit(-1); } params->set_debug(variables["debug"].as<bool>()); params->set_realm(variables["realm"].as<std::string>()); params->set_rawsocket_endpoint( variables["rawsocket-ip"].as<std::string>(), variables["rawsocket-port"].as<uint16_t>()); #ifdef BOOST_ASIO_HAS_LOCAL_SOCKETS params->set_uds_endpoint( variables["uds-path"].as<std::string>()); #endif return params; }
tavendo/AutobahnCpp
examples/parameters.cpp
C++
apache-2.0
5,326
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="google-site-verification" content="xBT4GhYoi5qRD5tr338pgPM5OWHHIDR6mNg1a3euekI" /> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="description" content="构造出一个个数据和逻辑的世界"> <meta name="keyword" content=""> <link rel="shortcut icon" href="/huxblog-boilerplate/img/favicon.ico"> <title>Example Post with Code Highlight - 你的博客 | Your Blog</title> <link rel="canonical" href="https://huangxuan.me/huxblog-boilerplate/2015/05/25/js-module-loader/"> <!-- Bootstrap Core CSS --> <link rel="stylesheet" href="/huxblog-boilerplate/css/bootstrap.min.css"> <!-- Custom CSS --> <link rel="stylesheet" href="/huxblog-boilerplate/css/hux-blog.min.css"> <!-- Pygments Github CSS --> <link rel="stylesheet" href="/huxblog-boilerplate/css/syntax.css"> <!-- Custom Fonts --> <!-- <link href="http://maxcdn.bootstrapcdn.com/font-awesome/4.3.0/css/font-awesome.min.css" rel="stylesheet" type="text/css"> --> <!-- Hux change font-awesome CDN to qiniu --> <link href="http://cdn.staticfile.org/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet" type="text/css"> <!-- Hux Delete, sad but pending in China <link href='http://fonts.googleapis.com/css?family=Lora:400,700,400italic,700italic' rel='stylesheet' type='text/css'> <link href='http://fonts.googleapis.com/css?family=Open+Sans:300italic,400italic,600italic,700italic,800italic,400,300,600,700,800' rel='stylesheet' type='text/ css'> --> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script> <script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script> <![endif]--> <!-- ga & ba script hoook --> <script></script> </head> <!-- hack iOS CSS :active style --> <body 
ontouchstart=""> <!-- Navigation --> <nav class="navbar navbar-default navbar-custom navbar-fixed-top"> <div class="container-fluid"> <!-- Brand and toggle get grouped for better mobile display --> <div class="navbar-header page-scroll"> <button type="button" class="navbar-toggle"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <a class="navbar-brand" href="/huxblog-boilerplate/">GorCat Blog</a> </div> <!-- Collect the nav links, forms, and other content for toggling --> <!-- Known Issue, found by Hux: <nav>'s height woule be hold on by its content. so, when navbar scale out, the <nav> will cover tags. also mask any touch event of tags, unfortunately. --> <div id="huxblog_navbar"> <div class="navbar-collapse"> <ul class="nav navbar-nav navbar-right"> <li> <a href="/huxblog-boilerplate/">Home</a> </li> <li> <a href="/huxblog-boilerplate/about/">About</a> </li> <li> <a href="/huxblog-boilerplate/tags/">Tags</a> </li> </ul> </div> </div> <!-- /.navbar-collapse --> </div> <!-- /.container --> </nav> <script> // Drop Bootstarp low-performance Navbar // Use customize navbar with high-quality material design animation // in high-perf jank-free CSS3 implementation var $body = document.body; var $toggle = document.querySelector('.navbar-toggle'); var $navbar = document.querySelector('#huxblog_navbar'); var $collapse = document.querySelector('.navbar-collapse'); $toggle.addEventListener('click', handleMagic) function handleMagic(e){ if ($navbar.className.indexOf('in') > 0) { // CLOSE $navbar.className = " "; // wait until animation end. 
setTimeout(function(){ // prevent frequently toggle if($navbar.className.indexOf('in') < 0) { $collapse.style.height = "0px" } },400) }else{ // OPEN $collapse.style.height = "auto" $navbar.className += " in"; } } </script> <!-- Image to hack wechat --> <!-- <img src="/img/icon_wechat.png" width="0" height="0"> --> <!-- <img src="/huxblog-boilerplate/img/post-bg-js-module.jpg" width="0" height="0"> --> <!-- Post Header --> <style type="text/css"> header.intro-header{ background-image: url('/huxblog-boilerplate/img/post-bg-js-module.jpg') } </style> <header class="intro-header" > <div class="container"> <div class="row"> <div class="col-lg-8 col-lg-offset-2 col-md-10 col-md-offset-1"> <div class="post-heading"> <div class="tags"> <a class="tag" href="/huxblog-boilerplate/tags/#前端开发" title="前端开发">前端开发</a> <a class="tag" href="/huxblog-boilerplate/tags/#JavaScript" title="JavaScript">JavaScript</a> </div> <h1>Example Post with Code Highlight</h1> <h2 class="subheading">CommonJS,RequireJS,SeaJS 归纳笔记</h2> <span class="meta">Posted by Hux on May 25, 2015</span> </div> </div> </div> </div> </header> <!-- Post Content --> <article> <div class="container"> <div class="row"> <!-- Post Container --> <div class=" col-lg-8 col-lg-offset-2 col-md-10 col-md-offset-1 post-container"> <h2 id="foreword">Foreword</h2> <blockquote> <p>Here comes Module!</p> </blockquote> <p>随着网站逐渐变成「互联网应用程序」,嵌入网页的 JavaScript 代码越来越庞大,越来越复杂。网页越来越像桌面程序,需要一个团队分工协作、进度管理、单元测试……我们不得不使用软件工程的方法,来管理网页的业务逻辑。</p> <p>于是,JavaScript 的模块化成为迫切需求。在 ES6 Module 来临之前,JavaScript 社区提供了强大支持,尝试在现有的运行环境下,实现模块的效果。</p> <hr /> <h2 id="catalog">Catalog</h2> <ol> <li><a href="#commonjs--node">CommonJS &amp; Node</a></li> <li><a href="#history">History</a></li> <li><a href="#requirejs--amd">RequireJS &amp; AMD</a></li> <li><a href="#seajs--cmd">SeaJS &amp; CMD</a></li> <li><a href="#amd-vs-cmd">AMD vs CMD</a></li> <li><a href="#webpack">WebPack</a></li> </ol> <h2 id="commonjs--node">CommonJS &amp; Node</h2> <blockquote> 
<p>Javascript: not just for browsers any more! —— CommonJS Slogen</p> </blockquote> <p>前端模块化的事实标准之一,2009 年 8 月,<a href="http://wiki.commonjs.org/wiki/CommonJS">CommonJS</a> 诞生。</p> <p>CommonJS 本质上只是一套规范(API 定义),而 Node.js 采用并实现了部分规范,CommonJS Module 的写法也因此广泛流行。</p> <p>让我们看看 Node 中的实现:</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="c1">// 由于 Node 原生支持模块的作用域,并不需要额外的 wrapper</span> <span class="c1">// "as though the module was wrapped in a function"</span> <span class="kd">var</span> <span class="nx">a</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'./a'</span><span class="p">)</span> <span class="c1">// 加载模块(同步加载)</span> <span class="nx">a</span><span class="p">.</span><span class="nx">doSomething</span><span class="p">()</span> <span class="c1">// 等上一句执行完才会执行</span> <span class="nx">exports</span><span class="p">.</span><span class="nx">b</span> <span class="o">=</span> <span class="kd">function</span><span class="p">(){</span> <span class="c1">// 暴露 b 函数接口</span> <span class="c1">// do something</span> <span class="p">}</span> </code></pre> </div> <p><code class="highlighter-rouge">exports</code>是一个内置对象,就像<code class="highlighter-rouge">require</code>是一个内置加载函数一样。如果你希望直接赋值一个完整的对象或者构造函数,覆写<code class="highlighter-rouge">module.exports</code>就可以了。</p> <p>CommonJS 前身叫 ServerJS ,<strong>后来希望能更加 COMMON,成为通吃各种环境的模块规范,改名为 CommonJS</strong> 。CommonJS 最初只专注于 Server-side 而非浏览器环境,因此它采用了同步加载的机制,这对服务器环境(硬盘 I/O 速度)不是问题,而对浏览器环境(网速)来说并不合适。</p> <p>因此,各种适用于浏览器环境的模块框架与标准逐个诞生,他们的共同点是:</p> <ul> <li>采用异步加载(预先加载所有依赖的模块后回调执行,符合浏览器的网络环境)</li> <li>虽然代码风格不同,但其实都可以看作 CommonJS Modules 语法的变体。</li> <li>都在向着 <strong>COMMON</strong> 的方向进化:<strong>兼容不同风格,兼容浏览器和服务器两种环境</strong></li> </ul> <p>本文接下来要讨论的典例是:</p> <ul> <li>RequireJS &amp; AMD(异步加载,预执行,依赖前置。默认推荐 AMD 写法)</li> <li>SeaJS &amp; CMD(异步加载,懒执行,依赖就近,默认推荐 CommonJS 写法)</li> </ul> <h2 id="history">History</h2> <!--<h2 id="history"> History </h2>--> 
<blockquote> <p>此段落参考自玉伯的 <a href="https://github.com/seajs/seajs/issues/588">前端模块化开发那点历史</a></p> </blockquote> <p>09-10 年间,CommonJS(那时还叫 ServerJS) 社区推出 <a href="http://wiki.commonjs.org/wiki/Modules">Modules/1.0</a> 规范,并且在 Node.js 等环境下取得了很不错的实践。</p> <p>09年下半年这帮充满干劲的小伙子们想把 ServerJS 的成功经验进一步推广到浏览器端,于是将社区改名叫 CommonJS,同时激烈争论 Modules 的下一版规范。分歧和冲突由此诞生,逐步形成了三大流派:</p> <ol> <li><strong>Modules/1.x</strong> 流派。这个观点觉得 1.x 规范已经够用,只要移植到浏览器端就好。要做的是新增 <a href="http://wiki.commonjs.org/wiki/Modules/Transport">Modules/Transport</a> 规范,即在浏览器上运行前,先通过转换工具将模块转换为符合 Transport 规范的代码。主流代表是服务端的开发人员。现在值得关注的有两个实现:越来越火的 component 和走在前沿的 es6 module transpiler。</li> <li><strong>Modules/Async</strong> 流派。这个观点觉得浏览器有自身的特征,不应该直接用 Modules/1.x 规范。这个观点下的典型代表是 <a href="http://wiki.commonjs.org/wiki/Modules/AsynchronousDefinition">AMD</a> 规范及其实现 <a href="http://requirejs.org/">RequireJS</a>。这个稍后再细说。</li> <li><strong>Modules/2.0</strong> 流派。这个观点觉得浏览器有自身的特征,不应该直接用 Modules/1.x 规范,但应该尽可能与 Modules/1.x 规范保持一致。这个观点下的典型代表是 BravoJS 和 FlyScript 的作者。BravoJS 作者对 CommonJS 的社区的贡献很大,这份 Modules/2.0-draft 规范花了很多心思。FlyScript 的作者提出了 Modules/Wrappings 规范,这规范是 CMD 规范的前身。可惜的是 BravoJS 太学院派,FlyScript 后来做了自我阉割,将整个网站(flyscript.org)下线了。这个观点在本文中的典型代表就是 SeaJS 和 CMD 了</li> </ol> <p>补一嘴:阿里 KISSY 的 KMD 其实跟 AMD 非常类似,只是用 <code class="highlighter-rouge">add</code>和<code class="highlighter-rouge">use</code> 两个源自于 YUI Modules 的函数名替换了 <code class="highlighter-rouge">define</code> 和 <code class="highlighter-rouge">require</code> ,但其原理更接近 RequireJS ,与 YUI Modules 的 <code class="highlighter-rouge">Y</code> 沙箱 Attach 机制并不相同</p> <h2 id="requirejs--amd">RequireJS &amp; AMD</h2> <p><a href="http://wiki.commonjs.org/wiki/Modules/AsynchronousDefinition">AMD (Async Module Definition)</a> 是 RequireJS 在推广过程中对模块定义的规范化产出。</p> <blockquote> <p>RequireJS is a JavaScript file and module loader. 
It is optimized for in-browser use, but it can be used in other JavaScript environments</p> </blockquote> <p>RequireJS 主要解决的还是 CommonJS 同步加载脚本不适合浏览器 这个问题:</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="c1">//CommonJS</span> <span class="kd">var</span> <span class="nx">Employee</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s2">"types/Employee"</span><span class="p">);</span> <span class="kd">function</span> <span class="nx">Programmer</span> <span class="p">(){</span> <span class="c1">//do something</span> <span class="p">}</span> <span class="nx">Programmer</span><span class="p">.</span><span class="nx">prototype</span> <span class="o">=</span> <span class="k">new</span> <span class="nx">Employee</span><span class="p">();</span> <span class="c1">//如果 require call 是异步的,那么肯定 error</span> <span class="c1">//因为在执行这句前 Employee 模块肯定来不及加载进来</span> </code></pre> </div> <blockquote> <p>As the comment indicates above, if require() is async, this code will not work. However, loading scripts synchronously in the browser kills performance. 
So, what to do?</p> </blockquote> <p>所以我们需要 <strong>Function Wrapping</strong> 来获取依赖并且提前通过 script tag 提前加载进来</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="c1">//AMD Wrapper</span> <span class="nx">define</span><span class="p">(</span> <span class="p">[</span><span class="nx">types</span><span class="o">/</span><span class="nx">Employee</span><span class="p">],</span> <span class="c1">//依赖</span> <span class="kd">function</span><span class="p">(</span><span class="nx">Employee</span><span class="p">){</span> <span class="c1">//这个回调会在所有依赖都被加载后才执行</span> <span class="kd">function</span> <span class="nx">Programmer</span><span class="p">(){</span> <span class="c1">//do something</span> <span class="p">};</span> <span class="nx">Programmer</span><span class="p">.</span><span class="nx">prototype</span> <span class="o">=</span> <span class="k">new</span> <span class="nx">Employee</span><span class="p">();</span> <span class="k">return</span> <span class="nx">Programmer</span><span class="p">;</span> <span class="c1">//return Constructor</span> <span class="p">}</span> <span class="p">)</span> </code></pre> </div> <p>当依赖模块非常多时,这种<strong>依赖前置</strong>的写法会显得有点奇怪,所以 AMD 给了一个语法糖, <strong>simplified CommonJS wrapping</strong>,借鉴了 CommonJS 的 require 就近风格,也更方便对 CommonJS 模块的兼容:</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="nx">define</span><span class="p">(</span><span class="kd">function</span> <span class="p">(</span><span class="nx">require</span><span class="p">)</span> <span class="p">{</span> <span class="kd">var</span> <span class="nx">dependency1</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'dependency1'</span><span class="p">),</span> <span class="nx">dependency2</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'dependency2'</span><span class="p">);</span> <span 
class="k">return</span> <span class="kd">function</span> <span class="p">()</span> <span class="p">{};</span> <span class="p">});</span> </code></pre> </div> <p>The AMD loader will parse out the <code class="highlighter-rouge">require('')</code> calls by using <code class="highlighter-rouge">Function.prototype.toString()</code>, then internally convert the above define call into this:</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="nx">define</span><span class="p">([</span><span class="s1">'require'</span><span class="p">,</span> <span class="s1">'dependency1'</span><span class="p">,</span> <span class="s1">'dependency2'</span><span class="p">],</span> <span class="kd">function</span> <span class="p">(</span><span class="nx">require</span><span class="p">)</span> <span class="p">{</span> <span class="kd">var</span> <span class="nx">dependency1</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'dependency1'</span><span class="p">),</span> <span class="nx">dependency2</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'dependency2'</span><span class="p">);</span> <span class="k">return</span> <span class="kd">function</span> <span class="p">()</span> <span class="p">{};</span> <span class="p">});</span> </code></pre> </div> <p>出于<code class="highlighter-rouge">Function.prototype.toString()</code>兼容性和性能的考虑,最好的做法还是做一次 <strong>optimized build</strong></p> <p>AMD 和 CommonJS 的核心争议如下:</p> <h3 id="section">1. 
<strong>执行时机</strong></h3> <p>Modules/1.0:</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="kd">var</span> <span class="nx">a</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s2">"./a"</span><span class="p">)</span> <span class="c1">// 执行到此时,a.js 才同步下载并执行</span> </code></pre> </div> <p>AMD: (使用 require 的语法糖时)</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="nx">define</span><span class="p">([</span><span class="s2">"require"</span><span class="p">],</span><span class="kd">function</span><span class="p">(</span><span class="nx">require</span><span class="p">)){</span> <span class="c1">// 在这里,a.js 已经下载并且执行好了</span> <span class="c1">// 使用 require() 并不是 AMD 的推荐写法</span> <span class="kd">var</span> <span class="nx">a</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s2">"./a"</span><span class="p">)</span> <span class="c1">// 此处仅仅是取模块 a 的 exports</span> <span class="p">})</span> </code></pre> </div> <p>AMD 里提前下载 a.js 是出于对浏览器环境的考虑,只能采取异步下载,这个社区都认可(Sea.js 也是这么做的)</p> <p>但是 AMD 的执行是 Early Executing,而 Modules/1.0 是第一次 require 时才执行。这个差异很多人不能接受,包括持 Modules/2.0 观点的人也不能接受。</p> <h3 id="section-1">2. 
<strong>书写风格</strong></h3> <p>AMD 推荐的风格并不使用<code class="highlighter-rouge">require</code>,而是通过参数传入,破坏了<strong>依赖就近</strong>:</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="nx">define</span><span class="p">([</span><span class="s2">"a"</span><span class="p">,</span> <span class="s2">"b"</span><span class="p">,</span> <span class="s2">"c"</span><span class="p">],</span><span class="kd">function</span><span class="p">(</span><span class="nx">a</span><span class="p">,</span> <span class="nx">b</span><span class="p">,</span> <span class="nx">c</span><span class="p">){</span> <span class="c1">// 提前申明了并初始化了所有模块</span> <span class="kc">true</span> <span class="o">||</span> <span class="nx">b</span><span class="p">.</span><span class="nx">foo</span><span class="p">();</span> <span class="c1">//即便根本没用到模块 b,但 b 还是提前执行了。</span> <span class="p">})</span> </code></pre> </div> <p>不过,在笔者看来,风格喜好因人而异,主要还是<strong>预执行</strong>和<strong>懒执行</strong>的差异。</p> <p>另外,require 2.0 也开始思考异步处理<strong>软依赖</strong>(区别于一定需要的<strong>硬依赖</strong>)的问题,提出了这样的方案:</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="c1">// 函数体内:</span> <span class="k">if</span><span class="p">(</span><span class="nx">status</span><span class="p">){</span> <span class="nx">async</span><span class="p">([</span><span class="s1">'a'</span><span class="p">],</span><span class="kd">function</span><span class="p">(</span><span class="nx">a</span><span class="p">){</span> <span class="nx">a</span><span class="p">.</span><span class="nx">doSomething</span><span class="p">()</span> <span class="p">})</span> <span class="p">}</span> </code></pre> </div> <h2 id="seajs--cmd">SeaJS &amp; CMD</h2> <p>CMD (Common Module Definition) 是 <a href="http://seajs.org/docs/">SeaJS</a> 在推广过程中对模块定义的规范化产出,是 Modules/2.0 流派的支持者,因此 SeaJS 的模块写法尽可能与 Modules/1.x 规范保持一致。</p> <p>不过目前国外的该流派都死得差不多了,RequireJS 目前成为浏览器端模块的事实标准,国内最有名气的就是玉伯的 Sea.js ,不过对国际的推广力度不够。</p> <ul> <li>CMD 
Specification <ul> <li><a href="https://github.com/cmdjs/specification/blob/master/draft/module.md">English (CMDJS-repo)</a></li> <li><a href="https://github.com/seajs/seajs/issues/242">Chinese (SeaJS-repo)</a></li> </ul> </li> </ul> <p>CMD 主要有 define, factory, require, export 这么几个东西</p> <ul> <li>define <code class="highlighter-rouge">define(id?, deps?, factory)</code></li> <li>factory <code class="highlighter-rouge">factory(require, exports, module)</code></li> <li>require <code class="highlighter-rouge">require(id)</code></li> <li>exports <code class="highlighter-rouge">Object</code></li> </ul> <p>CMD 推荐的 Code Style 是使用 CommonJS 风格的 <code class="highlighter-rouge">require</code>:</p> <ul> <li>这个 require 实际上是一个全局函数,用于加载模块,这里实际就是传入而已</li> </ul> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="nx">define</span><span class="p">(</span><span class="kd">function</span><span class="p">(</span><span class="nx">require</span><span class="p">,</span> <span class="nx">exports</span><span class="p">)</span> <span class="p">{</span> <span class="c1">// 获取模块 a 的接口</span> <span class="kd">var</span> <span class="nx">a</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'./a'</span><span class="p">);</span> <span class="c1">// 调用模块 a 的方法</span> <span class="nx">a</span><span class="p">.</span><span class="nx">doSomething</span><span class="p">();</span> <span class="c1">// 对外提供 foo 属性</span> <span class="nx">exports</span><span class="p">.</span><span class="nx">foo</span> <span class="o">=</span> <span class="s1">'bar'</span><span class="p">;</span> <span class="c1">// 对外提供 doSomething 方法</span> <span class="nx">exports</span><span class="p">.</span><span class="nx">doSomething</span> <span class="o">=</span> <span class="kd">function</span><span class="p">()</span> <span class="p">{};</span> <span class="p">});</span> </code></pre> </div> <p>但是你也可以使用 AMD 风格,或者使用 return 来进行模块暴露</p> <div 
class="language-js highlighter-rouge"><pre class="highlight"><code><span class="nx">define</span><span class="p">(</span><span class="s1">'hello'</span><span class="p">,</span> <span class="p">[</span><span class="s1">'jquery'</span><span class="p">],</span> <span class="kd">function</span><span class="p">(</span><span class="nx">require</span><span class="p">,</span> <span class="nx">exports</span><span class="p">,</span> <span class="nx">module</span><span class="p">)</span> <span class="p">{</span> <span class="c1">// 模块代码...</span> <span class="c1">// 直接通过 return 暴露接口</span> <span class="k">return</span> <span class="p">{</span> <span class="na">foo</span><span class="p">:</span> <span class="s1">'bar'</span><span class="p">,</span> <span class="na">doSomething</span><span class="p">:</span> <span class="kd">function</span><span class="p">()</span> <span class="p">{}</span> <span class="p">};</span> <span class="p">});</span> </code></pre> </div> <p>Sea.js 借鉴了 RequireJS 的不少东西,比如将 FlyScript 中的 module.declare 改名为 define 等。Sea.js 更多地来自 Modules/2.0 的观点,但尽可能去掉了学院派的东西,加入了不少实战派的理念。</p> <h2 id="amd-vs-cmd">AMD vs CMD</h2> <p><strong>虽然两者目前都兼容各种风格,但其底层原理并不相同,从其分别推荐的写法就可以看出两者背后原理的不同:</strong></p> <ol> <li>对于依赖的模块,AMD 是<strong>提前执行</strong>,CMD 是<strong>懒执行</strong>。(都是先加载) <ul> <li>CMD 推崇<strong>依赖就近</strong>,AMD 推崇<strong>依赖前置</strong>。</li> </ul> </li> </ol> <p>看代码:</p> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="c1">// AMD 默认推荐</span> <span class="nx">define</span><span class="p">([</span><span class="s1">'./a'</span><span class="p">,</span> <span class="s1">'./b'</span><span class="p">],</span> <span class="kd">function</span><span class="p">(</span><span class="nx">a</span><span class="p">,</span> <span class="nx">b</span><span class="p">)</span> <span class="p">{</span> <span class="c1">// 依赖前置,提前执行</span> <span class="nx">a</span><span class="p">.</span><span class="nx">doSomething</span><span class="p">()</span> <span 
class="nx">b</span><span class="p">.</span><span class="nx">doSomething</span><span class="p">()</span> <span class="p">})</span> </code></pre> </div> <div class="language-js highlighter-rouge"><pre class="highlight"><code><span class="c1">// CMD</span> <span class="nx">define</span><span class="p">(</span><span class="kd">function</span><span class="p">(</span><span class="nx">require</span><span class="p">,</span> <span class="nx">exports</span><span class="p">,</span> <span class="nx">module</span><span class="p">)</span> <span class="p">{</span> <span class="kd">var</span> <span class="nx">a</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'./a'</span><span class="p">)</span> <span class="nx">a</span><span class="p">.</span><span class="nx">doSomething</span><span class="p">()</span> <span class="kd">var</span> <span class="nx">b</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'./b'</span><span class="p">)</span> <span class="c1">// 依赖就近,延迟执行</span> <span class="nx">b</span><span class="p">.</span><span class="nx">doSomething</span><span class="p">()</span> <span class="p">})</span> </code></pre> </div> <h2 id="webpack">WebPack</h2> <blockquote> <p>working…</p> </blockquote> <hr> <!-- 多说 Share start --> </style> <div class="ds-share" style="text-align: right" data-thread-key="/2015/05/25/js-module-loader" data-title="Example Post with Code Highlight" data-url="https://huangxuan.me/huxblog-boilerplate/2015/05/25/js-module-loader/" data-images="https://huangxuan.me/huxblog-boilerplate/img/post-bg-js-module.jpg" data-content="Foreword Here comes Module! 随着网站逐渐变成「互联网应用程序」,嵌入网页的 JavaScript 代码越来越庞大,... 
| 你的博客 | Your Blog " > <div class="ds-share-inline"> <ul class="ds-share-icons-16"> <li data-toggle="ds-share-icons-more"><a class="ds-more" href="#">分享到:</a></li> <li><a class="ds-wechat flat" href="javascript:void(0);" data-service="wechat">微信</a></li> <li><a class="ds-weibo flat" href="javascript:void(0);" data-service="weibo">微博</a></li> <li><a class="ds-douban flat" href="javascript:void(0);" data-service="douban">豆瓣</a></li> </ul> <div class="ds-share-icons-more"> </div> </div> <hr> </div> <!-- 多说 Share end--> <ul class="pager"> <li class="previous"> <a href="/huxblog-boilerplate/2015/04/14/unix-linux-note/" data-toggle="tooltip" data-placement="top" title="Example Post">&larr; Previous Post</a> </li> <li class="next"> <a href="/huxblog-boilerplate/2015/07/09/js-module-7day/" data-toggle="tooltip" data-placement="top" title="Example Post using Keynote Layout">Next Post &rarr;</a> </li> </ul> <!-- 多说评论框 start --> <div class="comment"> <div class="ds-thread" data-thread-key="/2015/05/25/js-module-loader" data-title="Example Post with Code Highlight" data-url="https://huangxuan.me/huxblog-boilerplate/2015/05/25/js-module-loader/" > </div> </div> <!-- 多说评论框 end --> </div> <!-- Sidebar Container --> <div class=" col-lg-8 col-lg-offset-2 col-md-10 col-md-offset-1 sidebar-container"> <!-- Featured Tags --> <section> <hr class="hidden-sm hidden-xs"> <h5><a href="/tags/">FEATURED TAGS</a></h5> <div class="tags"> <a href="/huxblog-boilerplate/tags/#前端开发" title="前端开发" rel="3"> 前端开发 </a> <a href="/huxblog-boilerplate/tags/#JavaScript" title="JavaScript" rel="3"> JavaScript </a> </div> </section> <!-- Friends Blog --> <hr> <h5>FRIENDS</h5> <ul class="list-inline"> <li><a href="http://huangxuan.me">Hux Blog</a></li> <li><a href="#">Foo</a></li> <li><a href="#">Bar</a></li> <li><a href="#">Example Friends</a></li> <li><a href="#">It helps SEO</a></li> </ul> </div> </div> </div> </article> <!-- 多说公共JS代码 start (一个网页只需插入一次) --> <script type="text/javascript"> // dynamic User 
by Hux var _user = 'huxblog'; // duoshuo comment query. var duoshuoQuery = {short_name: _user }; (function() { var ds = document.createElement('script'); ds.type = 'text/javascript';ds.async = true; ds.src = (document.location.protocol == 'https:' ? 'https:' : 'http:') + '//static.duoshuo.com/embed.js'; ds.charset = 'UTF-8'; (document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(ds); })(); </script> <!-- 多说公共JS代码 end --> <!-- async load function --> <script> function async(u, c) { var d = document, t = 'script', o = d.createElement(t), s = d.getElementsByTagName(t)[0]; o.src = u; if (c) { o.addEventListener('load', function (e) { c(null, e); }, false); } s.parentNode.insertBefore(o, s); } </script> <!-- anchor-js, Doc:http://bryanbraun.github.io/anchorjs/ --> <script> async("http://cdn.bootcss.com/anchor-js/1.1.1/anchor.min.js",function(){ anchors.options = { visible: 'always', placement: 'right', icon: '#' }; anchors.add().remove('.intro-header h1').remove('.subheading').remove('.sidebar-container h5'); }) </script> <style> /* place left on bigger screen */ @media all and (min-width: 800px) { .anchorjs-link{ position: absolute; left: -0.75em; font-size: 1.1em; margin-top : -0.1em; } } </style> <!-- Footer --> <footer> <div class="container"> <div class="row"> <div class="col-lg-8 col-lg-offset-2 col-md-10 col-md-offset-1"> <ul class="list-inline text-center"> <!-- add Weibo, Zhihu by Hux, add target = "_blank" to <a> by Hux --> <li> <a target="_blank" href="http://weibo.com/GorCat"> <span class="fa-stack fa-lg"> <i class="fa fa-circle fa-stack-2x"></i> <i class="fa fa-weibo fa-stack-1x fa-inverse"></i> </span> </a> </li> <li> <a target="_blank" href="https://github.com/GorCat"> <span class="fa-stack fa-lg"> <i class="fa fa-circle fa-stack-2x"></i> <i class="fa fa-github fa-stack-1x fa-inverse"></i> </span> </a> </li> </ul> <p class="copyright text-muted"> Copyright &copy; GorCat Blog 2016 <br> Theme by <a 
href="http://huangxuan.me">Hux</a> | <iframe style="margin-left: 2px; margin-bottom:-5px;" frameborder="0" scrolling="0" width="91px" height="20px" src="https://ghbtns.com/github-btn.html?user=huxpro&repo=huxpro.github.io&type=star&count=true" > </iframe> </p> </div> </div> </div> </footer> <!-- jQuery --> <script src="/huxblog-boilerplate/js/jquery.min.js "></script> <!-- Bootstrap Core JavaScript --> <script src="/huxblog-boilerplate/js/bootstrap.min.js "></script> <!-- Custom Theme JavaScript --> <script src="/huxblog-boilerplate/js/hux-blog.min.js "></script> <!-- async load function --> <script> function async(u, c) { var d = document, t = 'script', o = d.createElement(t), s = d.getElementsByTagName(t)[0]; o.src = u; if (c) { o.addEventListener('load', function (e) { c(null, e); }, false); } s.parentNode.insertBefore(o, s); } </script> <!-- Because of the native support for backtick-style fenced code blocks right within the Markdown is landed in Github Pages, From V1.6, There is no need for Highlight.js, so Huxblog drops it officially. 
- https://github.com/blog/2100-github-pages-now-faster-and-simpler-with-jekyll-3-0 - https://help.github.com/articles/creating-and-highlighting-code-blocks/ --> <!-- <script> async("http://cdn.bootcss.com/highlight.js/8.6/highlight.min.js", function(){ hljs.initHighlightingOnLoad(); }) </script> <link href="http://cdn.bootcss.com/highlight.js/8.6/styles/github.min.css" rel="stylesheet"> --> <!-- jquery.tagcloud.js --> <script> // only load tagcloud.js in tag.html if($('#tag_cloud').length !== 0){ async("/js/jquery.tagcloud.js",function(){ $.fn.tagcloud.defaults = { //size: {start: 1, end: 1, unit: 'em'}, color: {start: '#bbbbee', end: '#0085a1'}, }; $('#tag_cloud a').tagcloud(); }) } </script> <!--fastClick.js --> <script> async("http://cdn.bootcss.com/fastclick/1.0.6/fastclick.min.js", function(){ var $nav = document.querySelector("nav"); if($nav) FastClick.attach($nav); }) </script> <!-- Google Analytics --> <script> // dynamic User by Hux var _gaId = 'UA-49627206-1'; var _gaDomain = 'huangxuan.me'; // Originial (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', _gaId, _gaDomain); ga('send', 'pageview'); </script> <!-- Baidu Tongji --> <script> // dynamic User by Hux var _baId = '4cc1f2d8f3067386cc5cdb626a202900'; // Originial var _hmt = _hmt || []; (function() { var hm = document.createElement("script"); hm.src = "//hm.baidu.com/hm.js?" + _baId; var s = document.getElementsByTagName("script")[0]; s.parentNode.insertBefore(hm, s); })(); </script> <!-- Image to hack wechat --> <img src="/img/icon_wechat.png" width="0" height="0" /> <!-- Migrate from head to bottom, no longer block render and still work --> </body> </html>
GorCat/gorcat.github.io
_site/2015/05/25/js-module-loader/index.html
HTML
apache-2.0
39,761
using Rabbit.Kernel;

using System.Collections.Generic;

namespace Rabbit.Web.Routes
{
    /// <summary>
    /// An abstract route provider.
    /// </summary>
    public interface IRouteProvider : IDependency
    {
        /// <summary>
        /// Collects route information from this provider.
        /// </summary>
        /// <param name="routes">The route collection that descriptors are added to.</param>
        void GetRoutes(ICollection<RouteDescriptor> routes);
    }
}
RabbitTeam/RabbitHub
Rabbit.Web/Routes/IRouteProvider.cs
C#
apache-2.0
431
<div ng-include="'components/navbar/navbar.html'"></div> <div class="container-fluid"> <div class="row"> <div class="col-sm-12"> <div class="column-header"> <!-- <div class="btn-group pull-right"> <label>Sort by</label> <a href="#" class="dropdown-toggle" id="dropdownMenu1" data-toggle="dropdown" aria-expanded="false"> Newest first <span class="caret"></span> </a> <ul class="dropdown-menu" role="menu" aria-labelledby="dropdownMenu1"> <li role="presentation" class="active"><a role="menuitem" tabindex="-1" href="#">Newest first</a></li> <li role="presentation"><a role="menuitem" tabindex="-1" href="#">Oldest first</a></li> </ul> </div> --> <h4> <strong>{{queryResults.length}}</strong> queries for user {{currentUser.username}} </h4> </div> <ul id="queryResults" class="list-group"> <li class="list-group-item" ng-repeat="query in queryResults track by $index"> <div ng-click="toggleListItemOpened(query.id)" class="query-result list-group-item-heading collapsed" tabindex="-1"> <div class="row"> <div class="col-xs-9 col-sm-10 col-lg-11"> {{query.name}} <span class="pull-right"> <ul class="list-unstyled query-options horizontal-list"> <li> <span ng-if="query.notificationHasRun === false" class="notification-badge"> New Results Available </span> </li> <li><button style="btn" ng-click="runQuery(query)"><i class="fa fa-share"></i> Run</button></li> <li><button style="btn" ng-click="deleteQuery(query.id)"><i class="fa fa-trash-o"></i> Delete</button></li> </ul> </span> <div class="list-group-item-meta"> <span>Search Terms: {{query.digState.searchTerms | limitTo: 20}}{{query.digState.searchTerms.length > 20 ? '...' 
: ''}}</span> <span>Created: {{query.createdAt | date:'MM/dd/yyyy HH:mm:ss'}}</span> <span>Last Run: {{query.lastRunDate | date:'MM/dd/yyyy HH:mm:ss'}}</span> </div> </div> </div> </div> <div collapse="!isListItemOpened(query.id)" class="list-group-item-text collapse"> <div class="row"> <div class="col-xs-9 col-sm-10 col-lg-11"> <h4>Name: {{query.name}}</h4> <span class="pull-right"> <ul class="list-unstyled query-options horizontal-list"> <li><button style="btn" ng-click="runQuery(query)"><i class="fa fa-share"></i> Run</button></li> <li><button style="btn" ng-click="deleteQuery(query.id)"><i class="fa fa-trash-o"></i> Delete</button></li> </ul> </span> <div class="row"> <div class="col-sm-9 query-details"> <dl> <dt>Search Terms</dt> <dd>{{query.digState.searchTerms}}</dd> </dl> <label>Filters</label><br/> <span ng-if="query.digState.filters.withImagesOnly" class="search-facet"> With Images Only </span> <span ng-repeat="aggFilter in facets.aggFilters"> <span ng-if="query.digState.includeMissing.aggregations[aggFilter.field].active" class="search-facet include-missing"> {{aggFilter.title}}: Not Specified </span> <span ng-repeat="(filterKey, selected) in query.digState.filters.aggFilters[aggFilter.field]"> <span ng-if="selected" class="search-facet"> {{aggFilter.title}}: {{filterKey}} </span> </span> </span> <span ng-repeat="textFilter in facets.euiFilters"> <span ng-if="query.digState.filters.textFilters[textFilter.field].submitted.length > 0" class="search-facet"> {{textFilter.title}}: {{query.digState.filters.textFilters[textFilter.field].submitted}} </span> </span> <span ng-repeat="(dateFilter, values) in query.digState.filters.dateFilters"> <span ng-repeat="(filterKey, date) in values"> <span ng-if="date" class="search-facet"> {{filterKey}}: {{ date | date : 'MM/dd/yyyy'}} </span> </span> </span> <dl> <dt>Sort By</dt> <dd>{{query.digState.selectedSort.title}}</dd> </dl> </div> </div> <div class="row"> <div class="col-sm-3 query-details"> <dl> <dt>Created</dt> 
<dd>{{query.createdAt | date:'MM/dd/yyyy HH:mm:ss'}}</dd> </dl> </div> <div class="col-sm-3 query-details"> <dl> <dt>Last Run</dt> <dd>{{query.lastRunDate | date:'MM/dd/yyyy HH:mm:ss'}}</dd> </dl> </div> <div class="col-sm-3 query-details"> <label>Frequency of Execution</label><br/> <select ng-options="option for option in frequencyOptions" ng-model="query.frequency" ng-change="toggleFrequency(query.id, query.frequency)"> </select> </div> </div> </div> </div> </div> </li> </ul> </div> </div> </div>
codeaudit/dig
client/app/queries/queries.html
HTML
apache-2.0
6,033
// TTTOrdinalNumberFormatter.h // // Copyright (c) 2011 Mattt Thompson (http://mattt.me) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
#import <Foundation/Foundation.h>

// Grammatical gender used when agreeing the ordinal indicator with a noun.
// NOTE(review): presumably relevant for gendered locales (e.g. Spanish,
// Italian) — confirm against the formatter implementation.
typedef enum {
    TTTOrdinalNumberFormatterMaleGender = 1,
    TTTOrdinalNumberFormatterFemaleGender = 2,
    TTTOrdinalNumberFormatterNeuterGender = 3,
} TTTOrdinalNumberFormatterPredicateGrammaticalGender;

// Grammatical number categories for the predicate noun. Values 1-4 are
// explicit; SingularCollective and Plural continue the sequence (5, 6).
typedef enum {
    TTTOrdinalNumberFormatterSingular = 1,
    TTTOrdinalNumberFormatterDual = 2,
    TTTOrdinalNumberFormatterTrial = 3,
    TTTOrdinalNumberFormatterQuadral = 4,
    TTTOrdinalNumberFormatterSingularCollective,
    TTTOrdinalNumberFormatterPlural,
} TTTOrdinalNumberFormatterPredicateGrammaticalNumber;

// NSNumberFormatter subclass for ordinal number formatting (per its name;
// implementation not visible in this header).
@interface TTTOrdinalNumberFormatter : NSNumberFormatter {
@private
    NSString *ordinalIndicator;   // custom ordinal suffix (e.g. "st", "nd")
    TTTOrdinalNumberFormatterPredicateGrammaticalGender grammaticalGender;
    TTTOrdinalNumberFormatterPredicateGrammaticalNumber grammaticalNumber;
}

// Accessor pair for the ordinal suffix string.
- (NSString *)ordinalIndicator;
- (void)setOrdinalIndicator:(NSString *)indicator;

// Accessor pair for the grammatical gender used during formatting.
- (TTTOrdinalNumberFormatterPredicateGrammaticalGender)grammaticalGender;
- (void)setGrammaticalGender:(TTTOrdinalNumberFormatterPredicateGrammaticalGender)gender;

// Accessor pair for the grammatical number used during formatting.
- (TTTOrdinalNumberFormatterPredicateGrammaticalNumber)grammaticalNumber;
- (void)setGrammaticalNumber:(TTTOrdinalNumberFormatterPredicateGrammaticalNumber)number;

@end
brightsoftdev/FringeKit
iOSFramework/UtilityFramework/FormatterKit/TTTOrdinalNumberFormatter/TTTOrdinalNumberFormatter.h
C
apache-2.0
2,446
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.applicationautoscaling.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.applicationautoscaling.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * PutScheduledActionRequest Marshaller.
 *
 * Code-generated adapter that converts a {@link PutScheduledActionRequest}
 * into an HTTP {@link Request} using the AWS JSON protocol.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class PutScheduledActionRequestProtocolMarshaller implements Marshaller<Request<PutScheduledActionRequest>, PutScheduledActionRequest> {

    // Static HTTP binding for this operation: AWS_JSON protocol, POST to "/",
    // payload taken from the request members, X-Amz-Target header identifier
    // "AnyScaleFrontendService.PutScheduledAction".
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("AnyScaleFrontendService.PutScheduledAction").serviceName("AWSApplicationAutoScaling").build();

    // Factory used to create the per-call protocol marshaller.
    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public PutScheduledActionRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the given request into an HTTP request.
     *
     * @param putScheduledActionRequest the request to marshall; must not be null
     * @return the marshalled HTTP request
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public Request<PutScheduledActionRequest> marshall(PutScheduledActionRequest putScheduledActionRequest) {

        if (putScheduledActionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // Three-phase marshalling: start, write member fields, finish.
            final ProtocolRequestMarshaller<PutScheduledActionRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    putScheduledActionRequest);

            protocolMarshaller.startMarshalling();
            PutScheduledActionRequestMarshaller.getInstance().marshall(putScheduledActionRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            // Wrap any failure (including runtime errors) in an SdkClientException.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
jentfoo/aws-sdk-java
aws-java-sdk-applicationautoscaling/src/main/java/com/amazonaws/services/applicationautoscaling/model/transform/PutScheduledActionRequestProtocolMarshaller.java
Java
apache-2.0
2,787
package io.qameta.htmlelements.element;

/**
 * Typed wrapper for an image element, exposing its common attributes.
 *
 * @author ehborisov
 */
public interface Image extends ExtendedWebElement {

    /**
     * Returns the value of the element's {@code src} attribute.
     */
    default String getSource() {
        final String sourceAttribute = getAttribute("src");
        return sourceAttribute;
    }

    /**
     * Returns the value of the element's {@code alt} attribute.
     */
    default String getAlt() {
        final String altAttribute = getAttribute("alt");
        return altAttribute;
    }
}
eroshenkoam/htmlelements
src/main/java/io/qameta/htmlelements/element/Image.java
Java
apache-2.0
272
package org.mockserver.integration.mocking;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.mockserver.integration.ClientAndServer;
import org.mockserver.integration.server.AbstractBasicMockingIntegrationTest;

import static org.mockserver.integration.ClientAndServer.startClientAndServer;
import static org.mockserver.stop.Stop.stopQuietly;

/**
 * Runs the shared {@link AbstractBasicMockingIntegrationTest} suite against a
 * MockServer instance started in-process via {@link ClientAndServer}.
 *
 * @author jamesdbloom
 */
public class ClientAndServerMockingIntegrationTest extends AbstractBasicMockingIntegrationTest {

    // Port the in-process server bound to; captured once for the whole class.
    private static int mockServerPort;

    @BeforeClass
    public static void startServer() {
        // startClientAndServer() with no args — presumably binds an ephemeral
        // port (TODO confirm); getLocalPort() reports the actual port used.
        mockServerClient = startClientAndServer();
        mockServerPort = ((ClientAndServer) mockServerClient).getLocalPort();
    }

    @AfterClass
    public static void stopServer() {
        // stopQuietly suppresses shutdown errors so teardown never fails the suite.
        stopQuietly(mockServerClient);
    }

    @Override
    public int getServerPort() {
        return mockServerPort;
    }
}
jamesdbloom/mockserver
mockserver-netty/src/test/java/org/mockserver/integration/mocking/ClientAndServerMockingIntegrationTest.java
Java
apache-2.0
914
/******************************************************************************* * Copyright 2013 Johns Hopkins University * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package edu.jhu.pha.vospace; import java.io.InputStream; import java.net.InetAddress; import java.nio.ByteBuffer; import java.text.NumberFormat; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import javax.servlet.http.HttpServlet; import org.apache.log4j.Logger; import udt.UDTInputStream; import udt.UDTOutputStream; import udt.UDTReceiver; import udt.UDTServerSocket; import udt.UDTSocket; import udt.util.Util; import edu.jhu.pha.vospace.api.SizeLimitInputStream; import edu.jhu.pha.vospace.api.exceptions.InternalServerErrorException; import edu.jhu.pha.vospace.api.exceptions.NotFoundException; import edu.jhu.pha.vospace.api.exceptions.PermissionDeniedException; import edu.jhu.pha.vospace.jobs.JobsProcessor; import edu.jhu.pha.vospace.node.Node; import edu.jhu.pha.vospace.node.NodeFactory; import edu.jhu.pha.vospace.oauth.SciDriveUser; import edu.jhu.pha.vospace.rest.JobDescription; import edu.jhu.pha.vospace.rest.JobDescription.STATE; import edu.jhu.pha.vospace.storage.StorageManager; import edu.jhu.pha.vospace.storage.StorageManagerFactory; public class UdtServlet extends HttpServlet implements Runnable { private static final long serialVersionUID = 8871136211551367030L; 
private static final Logger logger = Logger.getLogger(UdtServlet.class); private final int serverPort = 9000; private final ExecutorService threadPool=Executors.newFixedThreadPool(3); private Thread udtThread; @Override public void init() { udtThread = new Thread(this, "UDT Thread"); udtThread.setDaemon(true); udtThread.start(); } @Override public void destroy() { if(null != udtThread) udtThread.interrupt(); } public void run(){ UDTServerSocket server = null; try{ UDTReceiver.connectionExpiryDisabled=true; InetAddress myHost=InetAddress.getLocalHost(); logger.debug("Running the UDT server"); server = new UDTServerSocket(myHost,serverPort); while(!udtThread.isInterrupted()){ UDTSocket socket=server.accept(); Thread.sleep(1000); threadPool.execute(new RequestRunner(socket)); } logger.debug("Shutting down the UDT server"); server.shutDown(); }catch(Exception ex){ server.shutDown(); throw new RuntimeException(ex); } } public static class RequestRunner implements Runnable{ private final static Logger logger=Logger.getLogger(RequestRunner.class.getName()); private final UDTSocket socket; private final NumberFormat format=NumberFormat.getNumberInstance(); public RequestRunner(UDTSocket socket){ this.socket=socket; format.setMaximumFractionDigits(3); } public void run(){ try{ logger.debug("Handling request from "+socket.getSession().getDestination()); UDTInputStream in=socket.getInputStream(); UDTOutputStream out=socket.getOutputStream(); byte[]readBuf=new byte[32768]; ByteBuffer bb=ByteBuffer.wrap(readBuf); //read file name info while(in.read(readBuf)==0)Thread.sleep(100); //how many bytes to read for the file name byte[] jobId = new byte[36]; bb.get(jobId); JobDescription job = JobsProcessor.getJob(UUID.fromString(new String(jobId))); if(null == job) throw new NotFoundException("The job "+jobId+" is not found."); SciDriveUser username = SciDriveUser.fromName(job.getUsername()); switch(job.getDirection()) { case PULLFROMVOSPACE: { JobsProcessor.modifyJobState(job, 
STATE.RUN); logger.debug("Sending node through UDT: "+job.getTarget()); StorageManager backend = StorageManagerFactory.getStorageManager(username); InputStream dataInp = backend.getBytes(job.getTargetId().getNodePath()); try { Node node = NodeFactory.getNode(job.getTargetId(), username); //long size = Long.parseLong(backend.getNodeSize(job.getTargetId().getNodePath())); long size = node.getNodeInfo().getSize(); out.write(encode64(size)); out.flush(); long start=System.currentTimeMillis(); Util.copy(dataInp, out, size, false); JobsProcessor.modifyJobState(job, STATE.COMPLETED); logger.debug("[SendFile] Finished sending data."); long end=System.currentTimeMillis(); logger.debug(socket.getSession().getStatistics().toString()); double rate=1000.0*size/1024/1024/(end-start); logger.debug("[SendFile] Rate: "+format.format(rate)+" MBytes/sec. "+format.format(8*rate)+" MBit/sec."); logger.debug("Finished request from "+socket.getSession().getDestination()); } catch(InternalServerErrorException ex) { JobsProcessor.modifyJobState(job, STATE.ERROR); throw ex; } catch(NotFoundException ex) { JobsProcessor.modifyJobState(job, STATE.ERROR); throw ex; } catch(PermissionDeniedException ex) { JobsProcessor.modifyJobState(job, STATE.ERROR); throw ex; }finally{ logger.debug("Closing all the stuff"); try {socket.getSender().stop();} catch(Exception ex) {} try {socket.close();} catch(Exception ex) {} logger.debug("Closed all the stuff"); } break; } case PUSHTOVOSPACE: { JobsProcessor.modifyJobState(job, STATE.RUN); logger.debug("Receiving node through UDT: "+job.getTarget()); StorageManager backend = StorageManagerFactory.getStorageManager(username); try { byte[]sizeInfo=new byte[8]; int total=0; while(total<sizeInfo.length){ int r=in.read(sizeInfo); if(r<0)break; total+=r; } long size=decode(sizeInfo, 0); long start=System.currentTimeMillis(); backend.putBytes(job.getTargetId().getNodePath(), new SizeLimitInputStream(in, size)); logger.debug("Got the file"); long 
end=System.currentTimeMillis(); JobsProcessor.modifyJobState(job, STATE.COMPLETED); logger.debug("[ReceiveFile] Finished receiving data."); logger.debug(socket.getSession().getStatistics().toString()); double rate=1000.0*size/1024/1024/(end-start); logger.debug("[ReceiveFile] Rate: "+format.format(rate)+" MBytes/sec. "+format.format(8*rate)+" MBit/sec."); logger.debug("Finished request from "+socket.getSession().getDestination()); } catch(InternalServerErrorException ex) { JobsProcessor.modifyJobState(job, STATE.ERROR); throw ex; } catch(NotFoundException ex) { JobsProcessor.modifyJobState(job, STATE.ERROR); throw ex; } catch(PermissionDeniedException ex) { JobsProcessor.modifyJobState(job, STATE.ERROR); throw ex; }finally{ logger.debug("Closing all the stuff"); try {socket.getSender().stop();} catch(Exception ex) {} try {socket.close();} catch(Exception ex) {} logger.debug("Closed all the stuff"); } break; } default: throw new InternalServerErrorException("The job "+job.getDirection()+" is unsupported in this path."); } }catch(Exception ex){ ex.printStackTrace(); throw new RuntimeException(ex); } } } static byte[]encode64(long value){ byte m4= (byte) (value>>24 ); byte m3=(byte)(value>>16); byte m2=(byte)(value>>8); byte m1=(byte)(value); return new byte[]{m1,m2,m3,m4,0,0,0,0}; } static long decode(byte[]data, int start){ long result = (data[start+3] & 0xFF)<<24 |(data[start+2] & 0xFF)<<16 |(data[start+1] & 0xFF)<<8 |(data[start] & 0xFF); return result; } }
dimm0/scidrive
src/edu/jhu/pha/vospace/UdtServlet.java
Java
apache-2.0
8,300
using UnityEngine;
using System.Collections;

// Placeholder MonoBehaviour for block movement; both Unity lifecycle
// callbacks are currently empty (behaviour not yet implemented).
public class BlockMovement : MonoBehaviour {

	// Use this for initialization
	void Start () {
	
	}
	
	// Update is called once per frame
	void Update () {
	
	}
}
harjup/WizardBroadcast
Assets/Scripts/Interactables/BlockMovement.cs
C#
apache-2.0
212
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head><meta http-equiv="content-type" content="text/html; charset=UTF-8" /> <title>ApexParser xref</title> <link type="text/css" rel="stylesheet" href="../../../../../stylesheet.css" /> </head> <body> <div id="overview"><a href="../../../../../../apidocs/net/sourceforge/pmd/lang/apex/ApexParser.html">View Javadoc</a></div><pre> <a class="jxr_linenumber" name="L1" href="#L1">1</a> <em class="jxr_javadoccomment">/**</em> <a class="jxr_linenumber" name="L2" href="#L2">2</a> <em class="jxr_javadoccomment"> * BSD-style license; for more info see <a href="http://pmd.sourceforge.net/license.htm" target="alexandria_uri">http://pmd.sourceforge.net/license.htm</a>l</em> <a class="jxr_linenumber" name="L3" href="#L3">3</a> <em class="jxr_javadoccomment"> */</em> <a class="jxr_linenumber" name="L4" href="#L4">4</a> <strong class="jxr_keyword">package</strong> net.sourceforge.pmd.lang.apex; <a class="jxr_linenumber" name="L5" href="#L5">5</a> <a class="jxr_linenumber" name="L6" href="#L6">6</a> <strong class="jxr_keyword">import</strong> java.io.Reader; <a class="jxr_linenumber" name="L7" href="#L7">7</a> <strong class="jxr_keyword">import</strong> java.util.Map; <a class="jxr_linenumber" name="L8" href="#L8">8</a> <a class="jxr_linenumber" name="L9" href="#L9">9</a> <strong class="jxr_keyword">import</strong> net.sourceforge.pmd.lang.AbstractParser; <a class="jxr_linenumber" name="L10" href="#L10">10</a> <strong class="jxr_keyword">import</strong> net.sourceforge.pmd.lang.ParserOptions; <a class="jxr_linenumber" name="L11" href="#L11">11</a> <strong class="jxr_keyword">import</strong> net.sourceforge.pmd.lang.TokenManager; <a class="jxr_linenumber" name="L12" href="#L12">12</a> <strong class="jxr_keyword">import</strong> net.sourceforge.pmd.lang.ast.Node; <a class="jxr_linenumber" 
name="L13" href="#L13">13</a> <strong class="jxr_keyword">import</strong> net.sourceforge.pmd.lang.ast.ParseException; <a class="jxr_linenumber" name="L14" href="#L14">14</a> <a class="jxr_linenumber" name="L15" href="#L15">15</a> <em class="jxr_javadoccomment">/**</em> <a class="jxr_linenumber" name="L16" href="#L16">16</a> <em class="jxr_javadoccomment"> * Adapter for the Apex jorje parser</em> <a class="jxr_linenumber" name="L17" href="#L17">17</a> <em class="jxr_javadoccomment"> */</em> <a class="jxr_linenumber" name="L18" href="#L18">18</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">class</strong> <a href="../../../../../net/sourceforge/pmd/lang/apex/ApexParser.html">ApexParser</a> <strong class="jxr_keyword">extends</strong> <a href="../../../../../net/sourceforge/pmd/lang/AbstractParser.html">AbstractParser</a> { <a class="jxr_linenumber" name="L19" href="#L19">19</a> <strong class="jxr_keyword">private</strong> net.sourceforge.pmd.lang.apex.ast.ApexParser apexParser; <a class="jxr_linenumber" name="L20" href="#L20">20</a> <a class="jxr_linenumber" name="L21" href="#L21">21</a> <strong class="jxr_keyword">public</strong> <a href="../../../../../net/sourceforge/pmd/lang/apex/ApexParser.html">ApexParser</a>(<a href="../../../../../net/sourceforge/pmd/lang/ParserOptions.html">ParserOptions</a> parserOptions) { <a class="jxr_linenumber" name="L22" href="#L22">22</a> <strong class="jxr_keyword">super</strong>(parserOptions); <a class="jxr_linenumber" name="L23" href="#L23">23</a> apexParser = <strong class="jxr_keyword">new</strong> net.sourceforge.pmd.lang.apex.ast.ApexParser((ApexParserOptions) parserOptions); <a class="jxr_linenumber" name="L24" href="#L24">24</a> } <a class="jxr_linenumber" name="L25" href="#L25">25</a> <a class="jxr_linenumber" name="L26" href="#L26">26</a> @Override <a class="jxr_linenumber" name="L27" href="#L27">27</a> <strong class="jxr_keyword">public</strong> <a 
href="../../../../../net/sourceforge/pmd/lang/TokenManager.html">TokenManager</a> createTokenManager(Reader source) { <a class="jxr_linenumber" name="L28" href="#L28">28</a> <strong class="jxr_keyword">return</strong> <strong class="jxr_keyword">null</strong>; <a class="jxr_linenumber" name="L29" href="#L29">29</a> } <a class="jxr_linenumber" name="L30" href="#L30">30</a> <a class="jxr_linenumber" name="L31" href="#L31">31</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">boolean</strong> canParse() { <a class="jxr_linenumber" name="L32" href="#L32">32</a> <strong class="jxr_keyword">return</strong> <strong class="jxr_keyword">true</strong>; <a class="jxr_linenumber" name="L33" href="#L33">33</a> } <a class="jxr_linenumber" name="L34" href="#L34">34</a> <a class="jxr_linenumber" name="L35" href="#L35">35</a> <strong class="jxr_keyword">public</strong> <a href="../../../../../net/sourceforge/pmd/lang/ast/Node.html">Node</a> parse(String fileName, Reader source) <strong class="jxr_keyword">throws</strong> ParseException { <a class="jxr_linenumber" name="L36" href="#L36">36</a> <strong class="jxr_keyword">return</strong> apexParser.parse(source); <a class="jxr_linenumber" name="L37" href="#L37">37</a> } <a class="jxr_linenumber" name="L38" href="#L38">38</a> <a class="jxr_linenumber" name="L39" href="#L39">39</a> <strong class="jxr_keyword">public</strong> Map&lt;Integer, String&gt; getSuppressMap() { <a class="jxr_linenumber" name="L40" href="#L40">40</a> <strong class="jxr_keyword">return</strong> apexParser.getSuppressMap(); <a class="jxr_linenumber" name="L41" href="#L41">41</a> } <a class="jxr_linenumber" name="L42" href="#L42">42</a> } </pre> <hr/> <div id="footer">Copyright &#169; 2002&#x2013;2016 <a href="http://pmd.sourceforge.net/">InfoEther</a>. All rights reserved.</div> </body> </html>
jasonwee/videoOnCloud
pmd/pmd-doc-5.5.1/xref/net/sourceforge/pmd/lang/apex/ApexParser.html
HTML
apache-2.0
5,880
<div class="ui-block ui-shadow business_item" data-id="{{b.id}}" style="margin-top:2.5em;" > <div class="ui-block-head"> <div class="ui-content" > <strong>{{b.name}}</strong> </div> <div class="ui-content" style="text-transform: none;" > <div class="ui-right" > <a onclick="deafaultBusiness(event,this)" >Default</a> &nbsp;|&nbsp;<a onclick="deleteBusiness(event,this)" >Delete</a> </div> </div> </div> <div class="ui-block-body"> <table class="ui-form-table" > <tr class="info_item" > <td>NAME</td> <td> <form method="POST" action="/business/update/" class="ajax-form"> <div> {% csrf_token %} <input type="hidden" name="B_id" value="{{b.id}}" /> <input readonly type="text" name="B_name" value="{{b.name}}" data-value="{{b.name}}" placeholder="Name" class="ui-input" /> </div> <div class="ui-hide item_controls" > <input type="submit" value="Save" class="ui-btn" /> <input type="button" value="Cancel" onclick="editText(event,this)" class="ui-btn" /> </div> </form> </td> <td><a onclick="editText(event,this)" >Edit</a></td> </tr> <tr class="info_item" > <td>ABOUT</td> <td> <form method="POST" action="/business/update/" class="ajax-form"> <div> {% csrf_token %} <input type="hidden" name="B_id" value="{{b.id}}" /> <input readonly type="text" name="B_about" value="{{b.about}}" data-value="{{b.about}}" placeholder="About Business" class="ui-input" /> </div> <div class="ui-hide item_controls" > <input type="submit" value="Save" class="ui-btn" /> <input type="button" value="Cancel" onclick="editText(event,this)" class="ui-btn" /> </div> </form> </td> <td><a onclick="editText(event,this)" >Edit</a></td> </tr> <tr class="info_item" > <td>CATEGORY</td> <td> <form method="POST" action="/business/update/" class="ajax-form"> <div> {% csrf_token %} <input type="hidden" name="B_id" value="{{b.id}}" /> <select readonly name="B_category" value="{{b.category}}" data-value="{{b.category}}" class="ui-input" > {% for cat in categories %} <option value="{{cat.id}}" {% if b.fk_category.id == cat.id 
%}selected{% endif %}>{{cat.name}}</option> {% endfor %} </select> </div> <div class="ui-hide item_controls" > <input type="submit" value="Save" class="ui-btn" /> <input type="button" value="Cancel" onclick="editText(event,this)" class="ui-btn" /> </div> </form> </td> <td><a onclick="editText(event,this)" >Edit</a></td> </tr> <tr class="info_item" > <td>WEBSITE</td> <td> <form method="POST" action="/business/update/" class="ajax-form"> <div> {% csrf_token %} <input type="hidden" name="B_id" value="{{b.id}}" /> <input readonly type="text" name="B_website" value="{{b.website}}" data-value="{{b.website}}" placeholder="Website e.g. http://mybusiness.com" class="ui-input" /> </div> <div class="ui-hide item_controls" > <input type="submit" value="Save" class="ui-btn" /> <input type="button" value="Cancel" onclick="editText(event,this)" class="ui-btn" /> </div> </form> </td> <td><a onclick="editText(event,this)" >Edit</a></td> </tr> </table> <div id="business_addr_cont" > {% if b.address %} {% for address in b.address %} {% include '../locus/address_item_1.html' %} {% endfor %} {% else %} <div class="ui-body" > <h3>No Business Address</h3> </div> {% endif %} </div> <div style="margin-top: 1em;"> <a class="ui-btn-a" onclick="showAddressSelectBox(event, this)" >[+] Add From Address Book</a> <a class="ui-btn-a" onclick="showAddressBox(event, this)" >[+] Add New Address</a> </div> </div> </div> <!-- end item -->
amitdhiman000/MyOffers
templates/mobile/business/business_item_1.html
HTML
apache-2.0
5,171
/* * Copyright (c) 2014 Andree Hagelstein, Maik Schulze, Deutsche Telekom AG. All Rights Reserved. * * Filename: ImageClientGingerbread.java */ package com.task4java.http.client; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.Proxy; import java.net.URL; import java.security.cert.CertificateException; import java.util.concurrent.atomic.AtomicInteger; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import com.task4java.Stopwatch; import com.task4java.Tuple; import com.task4java.http.HttpStatusCodes; import com.task4java.net.URLBuilder; import com.task4java.util.log.Logger; public class ImageClientGingerbread implements IImageClient { private static final String DID_NOT_DEFINE_AN_ACTION = "You did not define an action! Image call canceled!"; private static final String TAG = ImageClient.class.getName(); private static AtomicInteger _operations = new AtomicInteger(0); private static AtomicInteger _connects = new AtomicInteger(0); private static Tuple<String, Integer> _proxySettings = null; public int getOperations() { return _operations.get(); } public void ignoreCertificateErrors() { // Create a trust manager that does not validate certificate chains TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() { @Override public void checkClientTrusted(java.security.cert.X509Certificate[] chain, String authType) throws CertificateException { } @Override public void checkServerTrusted(java.security.cert.X509Certificate[] chain, String authType) throws CertificateException { } @Override public java.security.cert.X509Certificate[] getAcceptedIssuers() { return null; } } }; // Install the all-trusting trust manager try { SSLContext sc = SSLContext.getInstance("SSL"); sc.init(null, 
trustAllCerts, new java.security.SecureRandom()); HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory()); } catch (Exception e) { } } public void setProxy(String hostname, Integer port) { if (hostname == null) { _proxySettings = null; } else { _proxySettings = new Tuple<String, Integer>(hostname, port); } } public ImageResponse getImage(URL action) throws MalformedURLException, IOException { HttpURLConnection connection = null; InputStream inputStream = null; int responseLength = -1; Stopwatch watch = new Stopwatch(); watch.start(); try { if (action == null) { Logger.instance.e(TAG, DID_NOT_DEFINE_AN_ACTION); throw new IllegalArgumentException(DID_NOT_DEFINE_AN_ACTION); } int connects = _connects.incrementAndGet(); _operations.incrementAndGet(); Logger.instance.d(TAG, "Enter getImage, connects: " + connects + ", url: " + action); if (_proxySettings == null) { connection = (HttpURLConnection) new URLBuilder(action).getURL().openConnection(); } else { Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(_proxySettings.left, _proxySettings.right)); connection = (HttpURLConnection) new URLBuilder(action).getURL().openConnection(proxy); } connection.setReadTimeout(60000); connection.setConnectTimeout(60000); connection.connect(); ImageResponse response = new ImageResponse(connection.getResponseCode()); if (response.getStatusCode() < HttpStatusCodes.OK || response.getStatusCode() >= HttpStatusCodes.BadRequest) { Logger.instance.d(TAG, "Leave getImage with status code: " + response.getStatusCode()); return response; } inputStream = connection.getInputStream(); if (inputStream != null) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); byte[] buffer = new byte[1024 * 4]; int n = 0; while (-1 != (n = inputStream.read(buffer))) { bos.write(buffer, 0, n); } response.setResponseData(bos.toByteArray()); responseLength = response.getResponseData().length; } return response; } finally { int connects = _connects.decrementAndGet(); watch.stop(); 
Logger.instance.d(TAG, "Leave getImage, connects: " + connects + ", time: " + watch.getElapsedMilliseconds() + ", length: " + responseLength + ", url: " + action); if (inputStream != null) { inputStream.close(); inputStream = null; } if (connection != null) { connection.disconnect(); connection = null; } } } }
dtag-dbu/task4java
com.task4java/src/com/task4java/http/client/ImageClientGingerbread.java
Java
apache-2.0
5,830
-- -*- test-case-name: txdav.caldav.datastore.test.test_sql,txdav.carddav.datastore.test.test_sql -*- ---- -- Copyright (c) 2010-2015 Apple Inc. All rights reserved. -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. ---- ----------------- -- Resource ID -- ----------------- create sequence RESOURCE_ID_SEQ; ------------------------- -- Cluster Bookkeeping -- ------------------------- -- Information about a process connected to this database. -- Note that this must match the node info schema in twext.enterprise.queue. create table NODE_INFO ( HOSTNAME varchar(255) not null, PID integer not null, PORT integer not null, TIME timestamp not null default timezone('UTC', CURRENT_TIMESTAMP), primary key (HOSTNAME, PORT) ); -- Unique named locks. This table should always be empty, but rows are -- temporarily created in order to prevent undesirable concurrency. 
create table NAMED_LOCK ( LOCK_NAME varchar(255) primary key ); ------------------- -- Calendar Home -- ------------------- create table CALENDAR_HOME ( RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index OWNER_UID varchar(255) not null unique, -- implicit index STATUS integer default 0 not null, -- enum HOME_STATUS DATAVERSION integer default 0 not null ); -- Enumeration of statuses create table HOME_STATUS ( ID integer primary key, DESCRIPTION varchar(16) not null unique ); insert into HOME_STATUS values (0, 'normal' ); insert into HOME_STATUS values (1, 'external'); -------------- -- Calendar -- -------------- create table CALENDAR ( RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ') -- implicit index ); ---------------------------- -- Calendar Home Metadata -- ---------------------------- create table CALENDAR_HOME_METADATA ( RESOURCE_ID integer primary key references CALENDAR_HOME on delete cascade, -- implicit index QUOTA_USED_BYTES integer default 0 not null, DEFAULT_EVENTS integer default null references CALENDAR on delete set null, DEFAULT_TASKS integer default null references CALENDAR on delete set null, DEFAULT_POLLS integer default null references CALENDAR on delete set null, ALARM_VEVENT_TIMED text default null, ALARM_VEVENT_ALLDAY text default null, ALARM_VTODO_TIMED text default null, ALARM_VTODO_ALLDAY text default null, AVAILABILITY text default null, CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP), MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP) ); create index CALENDAR_HOME_METADATA_DEFAULT_EVENTS on CALENDAR_HOME_METADATA(DEFAULT_EVENTS); create index CALENDAR_HOME_METADATA_DEFAULT_TASKS on CALENDAR_HOME_METADATA(DEFAULT_TASKS); create index CALENDAR_HOME_METADATA_DEFAULT_POLLS on CALENDAR_HOME_METADATA(DEFAULT_POLLS); ----------------------- -- Calendar Metadata -- ----------------------- create table CALENDAR_METADATA ( RESOURCE_ID integer primary key references 
CALENDAR on delete cascade, -- implicit index SUPPORTED_COMPONENTS varchar(255) default null, CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP), MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP) ); --------------------------- -- Sharing Notifications -- --------------------------- create table NOTIFICATION_HOME ( RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index OWNER_UID varchar(255) not null unique, -- implicit index STATUS integer default 0 not null, -- enum HOME_STATUS DATAVERSION integer default 0 not null ); create table NOTIFICATION ( RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index NOTIFICATION_HOME_RESOURCE_ID integer not null references NOTIFICATION_HOME, NOTIFICATION_UID varchar(255) not null, NOTIFICATION_TYPE varchar(255) not null, NOTIFICATION_DATA text not null, MD5 char(32) not null, CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP), MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP), unique(NOTIFICATION_UID, NOTIFICATION_HOME_RESOURCE_ID) -- implicit index ); create index NOTIFICATION_NOTIFICATION_HOME_RESOURCE_ID on NOTIFICATION(NOTIFICATION_HOME_RESOURCE_ID); ------------------- -- Calendar Bind -- ------------------- -- Joins CALENDAR_HOME and CALENDAR create table CALENDAR_BIND ( CALENDAR_HOME_RESOURCE_ID integer not null references CALENDAR_HOME, CALENDAR_RESOURCE_ID integer not null references CALENDAR on delete cascade, EXTERNAL_ID integer default null, CALENDAR_RESOURCE_NAME varchar(255) not null, BIND_MODE integer not null, -- enum CALENDAR_BIND_MODE BIND_STATUS integer not null, -- enum CALENDAR_BIND_STATUS BIND_REVISION integer default 0 not null, MESSAGE text, TRANSP integer default 0 not null, -- enum CALENDAR_TRANSP ALARM_VEVENT_TIMED text default null, ALARM_VEVENT_ALLDAY text default null, ALARM_VTODO_TIMED text default null, ALARM_VTODO_ALLDAY text default null, TIMEZONE text default null, primary 
key(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_ID), -- implicit index unique(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_NAME) -- implicit index ); create index CALENDAR_BIND_RESOURCE_ID on CALENDAR_BIND(CALENDAR_RESOURCE_ID); -- Enumeration of calendar bind modes create table CALENDAR_BIND_MODE ( ID integer primary key, DESCRIPTION varchar(16) not null unique ); insert into CALENDAR_BIND_MODE values (0, 'own' ); insert into CALENDAR_BIND_MODE values (1, 'read' ); insert into CALENDAR_BIND_MODE values (2, 'write'); insert into CALENDAR_BIND_MODE values (3, 'direct'); insert into CALENDAR_BIND_MODE values (4, 'indirect'); -- Enumeration of statuses create table CALENDAR_BIND_STATUS ( ID integer primary key, DESCRIPTION varchar(16) not null unique ); insert into CALENDAR_BIND_STATUS values (0, 'invited' ); insert into CALENDAR_BIND_STATUS values (1, 'accepted'); insert into CALENDAR_BIND_STATUS values (2, 'declined'); insert into CALENDAR_BIND_STATUS values (3, 'invalid'); insert into CALENDAR_BIND_STATUS values (4, 'deleted'); -- Enumeration of transparency create table CALENDAR_TRANSP ( ID integer primary key, DESCRIPTION varchar(16) not null unique ); insert into CALENDAR_TRANSP values (0, 'opaque' ); insert into CALENDAR_TRANSP values (1, 'transparent'); --------------------- -- Calendar Object -- --------------------- create table CALENDAR_OBJECT ( RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index CALENDAR_RESOURCE_ID integer not null references CALENDAR on delete cascade, RESOURCE_NAME varchar(255) not null, ICALENDAR_TEXT text not null, ICALENDAR_UID varchar(255) not null, ICALENDAR_TYPE varchar(255) not null, ATTACHMENTS_MODE integer default 0 not null, -- enum CALENDAR_OBJECT_ATTACHMENTS_MODE DROPBOX_ID varchar(255), ORGANIZER varchar(255), RECURRANCE_MIN date, -- minimum date that recurrences have been expanded to. RECURRANCE_MAX date, -- maximum date that recurrences have been expanded to. 
ACCESS integer default 0 not null, SCHEDULE_OBJECT boolean default false, SCHEDULE_TAG varchar(36) default null, SCHEDULE_ETAGS text default null, PRIVATE_COMMENTS boolean default false not null, MD5 char(32) not null, CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP), MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP), unique (CALENDAR_RESOURCE_ID, RESOURCE_NAME) -- implicit index -- since the 'inbox' is a 'calendar resource' for the purpose of storing -- calendar objects, this constraint has to be selectively enforced by the -- application layer. -- unique(CALENDAR_RESOURCE_ID, ICALENDAR_UID) ); create index CALENDAR_OBJECT_CALENDAR_RESOURCE_ID_AND_ICALENDAR_UID on CALENDAR_OBJECT(CALENDAR_RESOURCE_ID, ICALENDAR_UID); create index CALENDAR_OBJECT_CALENDAR_RESOURCE_ID_RECURRANCE_MAX on CALENDAR_OBJECT(CALENDAR_RESOURCE_ID, RECURRANCE_MAX); create index CALENDAR_OBJECT_ICALENDAR_UID on CALENDAR_OBJECT(ICALENDAR_UID); create index CALENDAR_OBJECT_DROPBOX_ID on CALENDAR_OBJECT(DROPBOX_ID); -- Enumeration of attachment modes create table CALENDAR_OBJECT_ATTACHMENTS_MODE ( ID integer primary key, DESCRIPTION varchar(16) not null unique ); insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (0, 'none' ); insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (1, 'read' ); insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (2, 'write'); -- Enumeration of calendar access types create table CALENDAR_ACCESS_TYPE ( ID integer primary key, DESCRIPTION varchar(32) not null unique ); insert into CALENDAR_ACCESS_TYPE values (0, '' ); insert into CALENDAR_ACCESS_TYPE values (1, 'public' ); insert into CALENDAR_ACCESS_TYPE values (2, 'private' ); insert into CALENDAR_ACCESS_TYPE values (3, 'confidential' ); insert into CALENDAR_ACCESS_TYPE values (4, 'restricted' ); ----------------- -- Instance ID -- ----------------- create sequence INSTANCE_ID_SEQ; ---------------- -- Time Range -- ---------------- create table TIME_RANGE ( INSTANCE_ID integer primary key 
default nextval('INSTANCE_ID_SEQ'), -- implicit index CALENDAR_RESOURCE_ID integer not null references CALENDAR on delete cascade, CALENDAR_OBJECT_RESOURCE_ID integer not null references CALENDAR_OBJECT on delete cascade, FLOATING boolean not null, START_DATE timestamp not null, END_DATE timestamp not null, FBTYPE integer not null, TRANSPARENT boolean not null ); create index TIME_RANGE_CALENDAR_RESOURCE_ID on TIME_RANGE(CALENDAR_RESOURCE_ID); create index TIME_RANGE_CALENDAR_OBJECT_RESOURCE_ID on TIME_RANGE(CALENDAR_OBJECT_RESOURCE_ID); -- Enumeration of free/busy types create table FREE_BUSY_TYPE ( ID integer primary key, DESCRIPTION varchar(16) not null unique ); insert into FREE_BUSY_TYPE values (0, 'unknown' ); insert into FREE_BUSY_TYPE values (1, 'free' ); insert into FREE_BUSY_TYPE values (2, 'busy' ); insert into FREE_BUSY_TYPE values (3, 'busy-unavailable'); insert into FREE_BUSY_TYPE values (4, 'busy-tentative' ); ------------------ -- Transparency -- ------------------ create table TRANSPARENCY ( TIME_RANGE_INSTANCE_ID integer not null references TIME_RANGE on delete cascade, USER_ID varchar(255) not null, TRANSPARENT boolean not null ); create index TRANSPARENCY_TIME_RANGE_INSTANCE_ID on TRANSPARENCY(TIME_RANGE_INSTANCE_ID); ---------------- -- Attachment -- ---------------- create sequence ATTACHMENT_ID_SEQ; create table ATTACHMENT ( ATTACHMENT_ID integer primary key default nextval('ATTACHMENT_ID_SEQ'), -- implicit index CALENDAR_HOME_RESOURCE_ID integer not null references CALENDAR_HOME, DROPBOX_ID varchar(255), CONTENT_TYPE varchar(255) not null, SIZE integer not null, MD5 char(32) not null, CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP), MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP), PATH varchar(1024) not null ); create index ATTACHMENT_CALENDAR_HOME_RESOURCE_ID on ATTACHMENT(CALENDAR_HOME_RESOURCE_ID); create index ATTACHMENT_DROPBOX_ID on ATTACHMENT(DROPBOX_ID); -- Many-to-many relationship between attachments and 
calendar objects create table ATTACHMENT_CALENDAR_OBJECT ( ATTACHMENT_ID integer not null references ATTACHMENT on delete cascade, MANAGED_ID varchar(255) not null, CALENDAR_OBJECT_RESOURCE_ID integer not null references CALENDAR_OBJECT on delete cascade, primary key (ATTACHMENT_ID, CALENDAR_OBJECT_RESOURCE_ID), -- implicit index unique (MANAGED_ID, CALENDAR_OBJECT_RESOURCE_ID) --implicit index ); create index ATTACHMENT_CALENDAR_OBJECT_CALENDAR_OBJECT_RESOURCE_ID on ATTACHMENT_CALENDAR_OBJECT(CALENDAR_OBJECT_RESOURCE_ID); ----------------------- -- Resource Property -- ----------------------- create table RESOURCE_PROPERTY ( RESOURCE_ID integer not null, -- foreign key: *.RESOURCE_ID NAME varchar(255) not null, VALUE text not null, -- FIXME: xml? VIEWER_UID varchar(255), primary key (RESOURCE_ID, NAME, VIEWER_UID) -- implicit index ); ---------------------- -- AddressBook Home -- ---------------------- create table ADDRESSBOOK_HOME ( RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index ADDRESSBOOK_PROPERTY_STORE_ID integer default nextval('RESOURCE_ID_SEQ') not null, -- implicit index OWNER_UID varchar(255) not null unique, -- implicit index STATUS integer default 0 not null, -- enum HOME_STATUS DATAVERSION integer default 0 not null ); ------------------------------- -- AddressBook Home Metadata -- ------------------------------- create table ADDRESSBOOK_HOME_METADATA ( RESOURCE_ID integer primary key references ADDRESSBOOK_HOME on delete cascade, -- implicit index QUOTA_USED_BYTES integer default 0 not null, CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP), MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP) ); ----------------------------- -- Shared AddressBook Bind -- ----------------------------- -- Joins sharee ADDRESSBOOK_HOME and owner ADDRESSBOOK_HOME create table SHARED_ADDRESSBOOK_BIND ( ADDRESSBOOK_HOME_RESOURCE_ID integer not null references ADDRESSBOOK_HOME, OWNER_HOME_RESOURCE_ID integer not 
null references ADDRESSBOOK_HOME on delete cascade, EXTERNAL_ID integer default null, ADDRESSBOOK_RESOURCE_NAME varchar(255) not null, BIND_MODE integer not null, -- enum CALENDAR_BIND_MODE BIND_STATUS integer not null, -- enum CALENDAR_BIND_STATUS BIND_REVISION integer default 0 not null, MESSAGE text, -- FIXME: xml? primary key (ADDRESSBOOK_HOME_RESOURCE_ID, OWNER_HOME_RESOURCE_ID), -- implicit index unique (ADDRESSBOOK_HOME_RESOURCE_ID, ADDRESSBOOK_RESOURCE_NAME) -- implicit index ); create index SHARED_ADDRESSBOOK_BIND_RESOURCE_ID on SHARED_ADDRESSBOOK_BIND(OWNER_HOME_RESOURCE_ID); ------------------------ -- AddressBook Object -- ------------------------ create table ADDRESSBOOK_OBJECT ( RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index ADDRESSBOOK_HOME_RESOURCE_ID integer not null references ADDRESSBOOK_HOME on delete cascade, RESOURCE_NAME varchar(255) not null, VCARD_TEXT text not null, VCARD_UID varchar(255) not null, KIND integer not null, -- enum ADDRESSBOOK_OBJECT_KIND MD5 char(32) not null, CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP), MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP), unique (ADDRESSBOOK_HOME_RESOURCE_ID, RESOURCE_NAME), -- implicit index unique (ADDRESSBOOK_HOME_RESOURCE_ID, VCARD_UID) -- implicit index ); ----------------------------- -- AddressBook Object kind -- ----------------------------- create table ADDRESSBOOK_OBJECT_KIND ( ID integer primary key, DESCRIPTION varchar(16) not null unique ); insert into ADDRESSBOOK_OBJECT_KIND values (0, 'person'); insert into ADDRESSBOOK_OBJECT_KIND values (1, 'group' ); insert into ADDRESSBOOK_OBJECT_KIND values (2, 'resource'); insert into ADDRESSBOOK_OBJECT_KIND values (3, 'location'); ---------------------------------- -- Revisions, forward reference -- ---------------------------------- create sequence REVISION_SEQ; --------------------------------- -- Address Book Object Members -- --------------------------------- create 
table ABO_MEMBERS ( GROUP_ID integer not null, -- references ADDRESSBOOK_OBJECT on delete cascade, -- AddressBook Object's (kind=='group') RESOURCE_ID ADDRESSBOOK_ID integer not null references ADDRESSBOOK_HOME on delete cascade, MEMBER_ID integer not null, -- references ADDRESSBOOK_OBJECT, -- member AddressBook Object's RESOURCE_ID REVISION integer default nextval('REVISION_SEQ') not null, REMOVED boolean default false not null, primary key (GROUP_ID, MEMBER_ID, REVISION) -- implicit index ); create index ABO_MEMBERS_ADDRESSBOOK_ID on ABO_MEMBERS(ADDRESSBOOK_ID); create index ABO_MEMBERS_MEMBER_ID on ABO_MEMBERS(MEMBER_ID); ------------------------------------------ -- Address Book Object Foreign Members -- ------------------------------------------ create table ABO_FOREIGN_MEMBERS ( GROUP_ID integer not null references ADDRESSBOOK_OBJECT on delete cascade, -- AddressBook Object's (kind=='group') RESOURCE_ID ADDRESSBOOK_ID integer not null references ADDRESSBOOK_HOME on delete cascade, MEMBER_ADDRESS varchar(255) not null, -- member AddressBook Object's 'calendar' address primary key (GROUP_ID, MEMBER_ADDRESS) -- implicit index ); create index ABO_FOREIGN_MEMBERS_ADDRESSBOOK_ID on ABO_FOREIGN_MEMBERS(ADDRESSBOOK_ID); ----------------------- -- Shared Group Bind -- ----------------------- -- Joins ADDRESSBOOK_HOME and ADDRESSBOOK_OBJECT (kind == group) create table SHARED_GROUP_BIND ( ADDRESSBOOK_HOME_RESOURCE_ID integer not null references ADDRESSBOOK_HOME, GROUP_RESOURCE_ID integer not null references ADDRESSBOOK_OBJECT on delete cascade, EXTERNAL_ID integer default null, GROUP_ADDRESSBOOK_NAME varchar(255) not null, BIND_MODE integer not null, -- enum CALENDAR_BIND_MODE BIND_STATUS integer not null, -- enum CALENDAR_BIND_STATUS BIND_REVISION integer default 0 not null, MESSAGE text, -- FIXME: xml? 
primary key (ADDRESSBOOK_HOME_RESOURCE_ID, GROUP_RESOURCE_ID), -- implicit index unique (ADDRESSBOOK_HOME_RESOURCE_ID, GROUP_ADDRESSBOOK_NAME) -- implicit index ); create index SHARED_GROUP_BIND_RESOURCE_ID on SHARED_GROUP_BIND(GROUP_RESOURCE_ID); --------------- -- Revisions -- --------------- -- create sequence REVISION_SEQ; ------------------------------- -- Calendar Object Revisions -- ------------------------------- create table CALENDAR_OBJECT_REVISIONS ( CALENDAR_HOME_RESOURCE_ID integer not null references CALENDAR_HOME, CALENDAR_RESOURCE_ID integer references CALENDAR, CALENDAR_NAME varchar(255) default null, RESOURCE_NAME varchar(255), REVISION integer default nextval('REVISION_SEQ') not null, DELETED boolean not null ); create index CALENDAR_OBJECT_REVISIONS_HOME_RESOURCE_ID_CALENDAR_RESOURCE_ID on CALENDAR_OBJECT_REVISIONS(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_ID); create index CALENDAR_OBJECT_REVISIONS_RESOURCE_ID_RESOURCE_NAME_DELETED_REVISION on CALENDAR_OBJECT_REVISIONS(CALENDAR_RESOURCE_ID, RESOURCE_NAME, DELETED, REVISION); create index CALENDAR_OBJECT_REVISIONS_RESOURCE_ID_REVISION on CALENDAR_OBJECT_REVISIONS(CALENDAR_RESOURCE_ID, REVISION); ---------------------------------- -- AddressBook Object Revisions -- ---------------------------------- create table ADDRESSBOOK_OBJECT_REVISIONS ( ADDRESSBOOK_HOME_RESOURCE_ID integer not null references ADDRESSBOOK_HOME, OWNER_HOME_RESOURCE_ID integer references ADDRESSBOOK_HOME, ADDRESSBOOK_NAME varchar(255) default null, OBJECT_RESOURCE_ID integer default 0, RESOURCE_NAME varchar(255), REVISION integer default nextval('REVISION_SEQ') not null, DELETED boolean not null ); create index ADDRESSBOOK_OBJECT_REVISIONS_HOME_RESOURCE_ID_OWNER_HOME_RESOURCE_ID on ADDRESSBOOK_OBJECT_REVISIONS(ADDRESSBOOK_HOME_RESOURCE_ID, OWNER_HOME_RESOURCE_ID); create index ADDRESSBOOK_OBJECT_REVISIONS_OWNER_HOME_RESOURCE_ID_RESOURCE_NAME_DELETED_REVISION on ADDRESSBOOK_OBJECT_REVISIONS(OWNER_HOME_RESOURCE_ID, 
RESOURCE_NAME, DELETED, REVISION); create index ADDRESSBOOK_OBJECT_REVISIONS_OWNER_HOME_RESOURCE_ID_REVISION on ADDRESSBOOK_OBJECT_REVISIONS(OWNER_HOME_RESOURCE_ID, REVISION); ----------------------------------- -- Notification Object Revisions -- ----------------------------------- create table NOTIFICATION_OBJECT_REVISIONS ( NOTIFICATION_HOME_RESOURCE_ID integer not null references NOTIFICATION_HOME on delete cascade, RESOURCE_NAME varchar(255), REVISION integer default nextval('REVISION_SEQ') not null, DELETED boolean not null, unique(NOTIFICATION_HOME_RESOURCE_ID, RESOURCE_NAME) -- implicit index ); create index NOTIFICATION_OBJECT_REVISIONS_RESOURCE_ID_REVISION on NOTIFICATION_OBJECT_REVISIONS(NOTIFICATION_HOME_RESOURCE_ID, REVISION); ------------------------------------------- -- Apple Push Notification Subscriptions -- ------------------------------------------- create table APN_SUBSCRIPTIONS ( TOKEN varchar(255) not null, RESOURCE_KEY varchar(255) not null, MODIFIED integer not null, SUBSCRIBER_GUID varchar(255) not null, USER_AGENT varchar(255) default null, IP_ADDR varchar(255) default null, primary key (TOKEN, RESOURCE_KEY) -- implicit index ); create index APN_SUBSCRIPTIONS_RESOURCE_KEY on APN_SUBSCRIPTIONS(RESOURCE_KEY); ----------------- -- IMIP Tokens -- ----------------- create table IMIP_TOKENS ( TOKEN varchar(255) not null, ORGANIZER varchar(255) not null, ATTENDEE varchar(255) not null, ICALUID varchar(255) not null, ACCESSED timestamp default timezone('UTC', CURRENT_TIMESTAMP), primary key (ORGANIZER, ATTENDEE, ICALUID) -- implicit index ); create index IMIP_TOKENS_TOKEN on IMIP_TOKENS(TOKEN); ---------------- -- Work Items -- ---------------- create sequence WORKITEM_SEQ; --------------------------- -- IMIP Inivitation Work -- --------------------------- create table IMIP_INVITATION_WORK ( WORK_ID integer primary key default nextval('WORKITEM_SEQ') not null, -- implicit index NOT_BEFORE timestamp default timezone('UTC', CURRENT_TIMESTAMP), 
FROM_ADDR varchar(255) not null, TO_ADDR varchar(255) not null, ICALENDAR_TEXT text not null ); ----------------------- -- IMIP Polling Work -- ----------------------- create table IMIP_POLLING_WORK ( WORK_ID integer primary key default nextval('WORKITEM_SEQ') not null, -- implicit index NOT_BEFORE timestamp default timezone('UTC', CURRENT_TIMESTAMP) ); --------------------- -- IMIP Reply Work -- --------------------- create table IMIP_REPLY_WORK ( WORK_ID integer primary key default nextval('WORKITEM_SEQ') not null, -- implicit index NOT_BEFORE timestamp default timezone('UTC', CURRENT_TIMESTAMP), ORGANIZER varchar(255) not null, ATTENDEE varchar(255) not null, ICALENDAR_TEXT text not null ); ------------------------ -- Push Notifications -- ------------------------ create table PUSH_NOTIFICATION_WORK ( WORK_ID integer primary key default nextval('WORKITEM_SEQ') not null, -- implicit index NOT_BEFORE timestamp default timezone('UTC', CURRENT_TIMESTAMP), PUSH_ID varchar(255) not null, PRIORITY integer not null -- 1:low 5:medium 10:high ); ----------------- -- GroupCacher -- ----------------- create table GROUP_CACHER_POLLING_WORK ( WORK_ID integer primary key default nextval('WORKITEM_SEQ') not null, -- implicit index NOT_BEFORE timestamp default timezone('UTC', CURRENT_TIMESTAMP) ); -------------------------- -- Object Splitter Work -- -------------------------- create table CALENDAR_OBJECT_SPLITTER_WORK ( WORK_ID integer primary key default nextval('WORKITEM_SEQ') not null, -- implicit index NOT_BEFORE timestamp default timezone('UTC', CURRENT_TIMESTAMP), RESOURCE_ID integer not null references CALENDAR_OBJECT on delete cascade ); create index CALENDAR_OBJECT_SPLITTER_WORK_RESOURCE_ID on CALENDAR_OBJECT_SPLITTER_WORK(RESOURCE_ID); -------------------- -- Schema Version -- -------------------- create table CALENDARSERVER ( NAME varchar(255) primary key, -- implicit index VALUE varchar(255) ); insert into CALENDARSERVER values ('VERSION', '32'); insert into 
CALENDARSERVER values ('CALENDAR-DATAVERSION', '6'); insert into CALENDARSERVER values ('ADDRESSBOOK-DATAVERSION', '2'); insert into CALENDARSERVER values ('NOTIFICATION-DATAVERSION', '1');
red-hood/calendarserver
txdav/common/datastore/sql_schema/old/postgres-dialect/v32.sql
SQL
apache-2.0
27,530
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="ro"> <head> <!-- Generated by javadoc (version 1.7.0_07) on Tue May 27 14:37:22 EEST 2014 --> <title>net.sf.jasperreports.components.map.type (JasperReports 5.6.0 API)</title> <meta name="date" content="2014-05-27"> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="net.sf.jasperreports.components.map.type (JasperReports 5.6.0 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../../net/sf/jasperreports/components/map/fill/package-summary.html">Prev Package</a></li> <li><a href="../../../../../../net/sf/jasperreports/components/sort/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?net/sf/jasperreports/components/map/type/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a 
href="../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 title="Package" class="title">Package&nbsp;net.sf.jasperreports.components.map.type</h1> <div class="docSummary"> <div class="block">Contains <code>enum</code> types used by the built-in Google Map component.</div> </div> <p>See:&nbsp;<a href="#package_description">Description</a></p> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Enum Summary table, listing enums, and an explanation"> <caption><span>Enum Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Enum</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../../../net/sf/jasperreports/components/map/type/MapImageTypeEnum.html" title="enum in net.sf.jasperreports.components.map.type">MapImageTypeEnum</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../../../net/sf/jasperreports/components/map/type/MapScaleEnum.html" title="enum in net.sf.jasperreports.components.map.type">MapScaleEnum</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../../../net/sf/jasperreports/components/map/type/MapTypeEnum.html" title="enum in net.sf.jasperreports.components.map.type">MapTypeEnum</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> </ul> <a name="package_description"> <!-- --> </a> <h2 title="Package 
net.sf.jasperreports.components.map.type Description">Package net.sf.jasperreports.components.map.type Description</h2> <div class="block">Contains <code>enum</code> types used by the built-in Google Map component.</div> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../../net/sf/jasperreports/components/map/fill/package-summary.html">Prev Package</a></li> <li><a href="../../../../../../net/sf/jasperreports/components/sort/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../../index.html?net/sf/jasperreports/components/map/type/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small> <span 
style="font-decoration:none;font-family:Arial,Helvetica,sans-serif;font-size:8pt;font-style:normal;color:#000000;">&copy; 2001-2010 Jaspersoft Corporation <a href="http://www.jaspersoft.com" target="_blank" style="color:#000000;">www.jaspersoft.com</a></span> </small></p> </body> </html>
phurtado1112/cnaemvc
lib/JasperReport__5.6/docs/api/net/sf/jasperreports/components/map/type/package-summary.html
HTML
apache-2.0
6,232
/** * Copyright 2014 David L. Whitehurst * * Licensed under the Apache License, Version 2.0 * (the "License"); You may not use this file except * in compliance with the License. You may obtain a * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the * License. * */ package org.musicrecital.webapp.pages; import org.apache.tapestry5.EventContext; import org.apache.tapestry5.Link; import org.apache.tapestry5.alerts.AlertManager; import org.apache.tapestry5.alerts.Duration; import org.apache.tapestry5.alerts.Severity; import org.apache.tapestry5.annotations.InjectPage; import org.apache.tapestry5.ioc.Messages; import org.apache.tapestry5.ioc.annotations.Inject; import org.apache.tapestry5.services.PageRenderLinkSource; import org.musicrecital.service.UserManager; import org.musicrecital.webapp.services.EmailService; import org.musicrecital.webapp.util.RequestUtil; import org.slf4j.Logger; import org.springframework.security.core.userdetails.UsernameNotFoundException; import javax.servlet.http.HttpServletRequest; /** * Generates a password recovery token and sends it via email to user. 
* */ public class PasswordRecoveryToken { @Inject private Logger logger; @Inject private Messages messages; @Inject private UserManager userManager; @Inject private AlertManager alertManager; @Inject private HttpServletRequest request; @Inject private PageRenderLinkSource pageRenderLinkSource; private String username; Object onActivate(EventContext ctx) { // ensure that the username has been set if (ctx == null || ctx.getCount() == 0) { logger.warn("Username not specified, notifying user that it's a required field."); alertManager.alert(Duration.TRANSIENT, Severity.ERROR, messages.format("errors.required", messages.get("user.username"))); return Login.class; } // Expect username is the first item in the context int userIdx = 0; this.username = ctx.get(String.class, userIdx).trim(); logger.debug("Sending recovery token for username: " + username); try { userManager.sendPasswordRecoveryEmail(username, RequestUtil.getAppURL(request) + getLink()); } catch (final UsernameNotFoundException ignored) { // lets ignore this Throwable exceptionToLog = ignored.getCause() != null ? ignored.getCause() : ignored; logger.error(exceptionToLog.getLocalizedMessage()); } alertManager.alert(Duration.TRANSIENT, Severity.INFO, messages.get("updatePassword.recoveryToken.sent")); return Login.class; } /** * Build link for for password reset * @return URI */ public String getLink() { Link link = pageRenderLinkSource.createPageRenderLinkWithContext(PasswordUpdate.class); link.addParameter("username", "{username}"); link.addParameter("token", "{token}"); return link.toURI(); } }
dlwhitehurst/MusicRecital
src/main/java/org/musicrecital/webapp/pages/PasswordRecoveryToken.java
Java
apache-2.0
3,421
# Minimalist Portfolio Template Whitespace is a responsive portfolio template with a minimalist style. Live Preview: https://whitespace.webmasterbits.com Download: https://github.com/webmasterbits/minimalist-porfolio/archive/master.zip MIT License
webmasterbits/minimalist-porfolio
README.md
Markdown
apache-2.0
249
package com.audioseparate;

import java.util.Iterator;
import java.util.List;
import java.util.Set;

import android.app.Activity;
import android.app.AlertDialog;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothHeadset;
import android.bluetooth.BluetoothProfile;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Toast;

/**
 * Main screen: verifies Bluetooth availability, lets the user pick a bonded
 * device, connects to it as a headset profile and starts an SCO audio link so
 * audio can be captured from the Bluetooth device (see {@code Record}).
 */
public class MainActivity extends Activity {

    private static final String TAG = "Audio Separate - MainActivity: ";
    private static final boolean DEBUG = true;

    private AudioManager mAudioManager = null;
    private Context mContext = null;
    private BluetoothHeadset mBluetoothHeadset;
    // True while an SCO audio stream is connected or being started.
    private boolean scoON = false;
    // Index of the device the user picked in the chooser dialog.
    private int selected = 0;
    // Parallel arrays of bonded-device names and BD addresses for the dialog.
    private String[] choices = null;
    private String[] addresses = null;

    /* Broadcast receiver for the SCO State broadcast intent. */
    private final BroadcastReceiver mSCOHeadsetAudioState = new BroadcastReceiver() {
        public void onReceive(Context context, Intent intent) {
            int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, -1);
            if (state == AudioManager.SCO_AUDIO_STATE_CONNECTED) {
                DisplayToast("BT Recording is Ready");
                scoON = true;
                // Forward the broadcast's extras to the recording screen.
                Intent recordIntent = new Intent(context, Record.class);
                recordIntent.putExtras(intent);
                startActivity(recordIntent);
            } else if (state == AudioManager.SCO_AUDIO_STATE_DISCONNECTED) {
                DisplayToast("BT Recording Disabled");
                scoON = false;
            }
        }
    };

    // PC Connect Connection Receiver.
    private final BroadcastReceiver PCCommand = new BroadcastReceiver() {
        public void onReceive(Context context, Intent intent) {
            // NOTE(review): intentionally a no-op so far; reacts only when SCO
            // is off, but no handling has been implemented yet.
            if (!scoON) {
            }
        }
    };

    // Service listener of BluetoothProfile: once the HEADSET proxy is bound,
    // start SCO if at least one headset is connected.
    private BluetoothProfile.ServiceListener mProfileListener = new BluetoothProfile.ServiceListener() {
        public void onServiceConnected(int profile, BluetoothProfile proxy) {
            if (profile == BluetoothProfile.HEADSET) {
                mBluetoothHeadset = (BluetoothHeadset) proxy;

                List<BluetoothDevice> pairedDevices = mBluetoothHeadset.getConnectedDevices();
                // If there are paired devices
                if (pairedDevices.size() > 0) {
                    startSCO();
                    for (BluetoothDevice device : pairedDevices) {
                        // Print out headset name and address for debugging.
                        Log.e(TAG, "BT Device :" + device.getName() + " , BD_ADDR:" + device.getAddress());
                    }
                } else {
                    Toast.makeText(mContext,
                            "Could not find a connected Headset, please connect a headset",
                            Toast.LENGTH_LONG).show();
                    return;
                }
            }
        }

        public void onServiceDisconnected(int profile) {
            if (profile == BluetoothProfile.HEADSET) {
                mBluetoothHeadset = null;
            }
        }
    };

    // Local Bluetooth adapter.
    private BluetoothAdapter mBluetoothAdapter = null;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mContext = this;

        // Get local Bluetooth adapter.
        mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();

        // If the adapter is null, then Bluetooth is not supported.
        if (mBluetoothAdapter == null) {
            Toast.makeText(this, "Bluetooth is not available", Toast.LENGTH_LONG).show();
            finish();
            return;
        }

        // Ask the user to enable Bluetooth if it is currently off.
        // NOTE(review): the enable result arrives asynchronously; if it must be
        // acted on, handle it in onActivityResult() — a synchronous check here
        // can never observe the outcome.
        int REQUEST_ENABLE_BT = RESULT_OK;
        if (!mBluetoothAdapter.isEnabled()) {
            Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT);
        }
    }

    /**
     * Button handler: shows a single-choice dialog of all bonded devices and,
     * on OK, connects to the selected device via {@link #PCConnect(String)}.
     */
    public void SCOSetup(View view) {
        Set<BluetoothDevice> devices = mBluetoothAdapter.getBondedDevices();
        Iterator<BluetoothDevice> iter = devices.iterator();
        choices = new String[devices.size()];
        addresses = new String[devices.size()];
        for (int i = 0; i < devices.size(); i++) {
            BluetoothDevice temp = iter.next();
            choices[i] = temp.getName();
            addresses[i] = temp.getAddress();
        }

        ArrayAdapter<String> adapter =
                new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, choices);

        // 1. Instantiate an AlertDialog.Builder with its constructor.
        AlertDialog.Builder builder = new AlertDialog.Builder(this);

        // 2. Chain together various setter methods to set the dialog characteristics.
        builder.setTitle("Bluetooth PC")
                .setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        dialog.dismiss();
                    }
                })
                .setCancelable(false)
                .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        PCConnect(addresses[selected]);
                        dialog.dismiss();
                    }
                })
                .setSingleChoiceItems(adapter, -1, new OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        selected = which;
                        String msg = "You Selected: " + choices[selected];
                        Toast.makeText(mContext, msg, Toast.LENGTH_SHORT).show();
                    }
                });

        // 3. Get the AlertDialog from create().
        AlertDialog dialog = builder.create();
        dialog.show();
    }

    /**
     * Registers the PC-command receiver, binds the headset profile proxy and
     * opens an RFCOMM connection to the given device address.
     *
     * @param PCAddr BD address of the target device
     */
    public void PCConnect(String PCAddr) {
        IntentFilter newintent = new IntentFilter();
        newintent.addAction("CONNECT_PC");
        mContext.registerReceiver(PCCommand, newintent);
        mBluetoothAdapter.getProfileProxy(mContext, mProfileListener, BluetoothProfile.HEADSET);
        BTConnect BTPC = new BTConnect(PCAddr);
        BTPC.Start();
    }

    /**
     * Registers for SCO state updates and asks the AudioManager to start an
     * SCO audio connection; the receiver launches the Record screen once the
     * stream is connected.
     */
    public void startSCO() {
        IntentFilter newintent = new IntentFilter();
        newintent.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
        mContext.registerReceiver(mSCOHeadsetAudioState, newintent);

        // Get the Audio Service context.
        mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        if (mAudioManager == null) {
            Log.e(TAG, "Audiomanager is null");
            finish();
            return;
        }

        if (DEBUG)
            Log.e(TAG, "SCO Start Attempted");

        if (!scoON) {
            scoON = true;
            mAudioManager.startBluetoothSco();
        } else {
            DisplayToast("Audio Stream already started, or starting");
        }
    }

    @Override
    public void onPause() {
        super.onPause();
    }

    @Override
    public void onResume() {
        super.onResume();
    }

    @Override
    public void onStop() {
        // BUG FIX: this used to call onDestroy() directly and never invoked
        // super.onStop(). Lifecycle methods must only be driven by the
        // framework, and every override must call through to super.
        super.onStop();
    }

    @Override
    public void onDestroy() {
        // TODO(review): receivers registered in startSCO()/PCConnect() are
        // never unregistered; consider unregisterReceiver(...) here — confirm
        // against the intended lifecycle before changing behavior.
        super.onDestroy();
    }

    /** Convenience wrapper for a short toast message. */
    private void DisplayToast(String msg) {
        Toast.makeText(getBaseContext(), msg, Toast.LENGTH_SHORT).show();
    }
}
siracoj/BluetoothRecord
AudioSeparate/src/com/audioseparate/MainActivity.java
Java
apache-2.0
8,016
/*
 * Licensed to Crate.io GmbH ("Crate") under one or more contributor
 * license agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership. Crate licenses
 * this file to you under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * However, if you have executed another commercial license agreement
 * with Crate these terms will supersede the license and you may use the
 * software solely pursuant to the terms of the relevant commercial agreement.
 */

package io.crate.execution.ddl.tables;

import io.crate.Constants;
import io.crate.analyze.BoundCreateTable;
import io.crate.exceptions.Exceptions;
import io.crate.exceptions.SQLExceptions;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Singleton;

import javax.annotation.Nullable;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;

/**
 * Executes CREATE TABLE statements by translating a {@link BoundCreateTable}
 * into either a create-index request (plain table) or a put-template request
 * (partitioned table) and dispatching it via {@link TransportCreateTableAction}.
 */
@Singleton
public class TableCreator {

    protected static final Logger LOGGER = LogManager.getLogger(TableCreator.class);

    private final TransportCreateTableAction transportCreateTableAction;

    @Inject
    public TableCreator(TransportCreateTableAction transportCreateTableAction) {
        // Consistency fix: the parameter was previously named
        // "transportCreateIndexAction", which matched neither the field nor
        // the action's type/purpose.
        this.transportCreateTableAction = transportCreateTableAction;
    }

    /**
     * Creates the table described by {@code createTable}.
     *
     * @return a future resolving to the affected row count: 1 on creation,
     *         0 when the table already exists and IF NOT EXISTS was given.
     */
    public CompletableFuture<Long> create(BoundCreateTable createTable) {
        var templateName = createTable.templateName();
        var relationName = createTable.tableIdent();
        // A non-null template name indicates a partitioned table, which is
        // backed by an index template instead of a concrete index.
        var createTableRequest = templateName == null
            ? new CreateTableRequest(
                new CreateIndexRequest(
                    relationName.indexNameOrAlias(),
                    createTable.tableParameter().settings()
                ).mapping(Constants.DEFAULT_MAPPING_TYPE, createTable.mapping())
            )
            : new CreateTableRequest(
                new PutIndexTemplateRequest(templateName)
                    .mapping(Constants.DEFAULT_MAPPING_TYPE, createTable.mapping())
                    .create(true)
                    .settings(createTable.tableParameter().settings())
                    .patterns(Collections.singletonList(createTable.templatePrefix()))
                    .order(100)
                    .alias(new Alias(relationName.indexNameOrAlias()))
            );
        return transportCreateTableAction.execute(createTableRequest, resp -> {
            if (!resp.isAllShardsAcked() && LOGGER.isWarnEnabled()) {
                LOGGER.warn("CREATE TABLE `{}` was not acknowledged. This could lead to inconsistent state.",
                            relationName.fqn());
            }
            return 1L;
        }).exceptionally(error -> {
            Throwable t = SQLExceptions.unwrap(error);
            String message = t.getMessage();
            Throwable cause = t.getCause();
            if ("mapping [default]".equals(message) && cause != null) {
                // this is a generic mapping parse exception,
                // the cause has usually a better more detailed error message
                return Exceptions.rethrowRuntimeException(cause);
            } else if (createTable.ifNotExists() && isTableExistsError(t, templateName)) {
                return 0L;
            } else {
                return Exceptions.rethrowRuntimeException(t);
            }
        });
    }

    /** True if {@code e} means the table (index or template) already exists. */
    private static boolean isTableExistsError(Throwable e, @Nullable String templateName) {
        return e instanceof ResourceAlreadyExistsException
               || (templateName != null && isTemplateAlreadyExistsException(e));
    }

    // Template-already-exists surfaces as an IllegalArgumentException whose
    // message ends with "already exists"; there is no dedicated exception type.
    private static boolean isTemplateAlreadyExistsException(Throwable e) {
        return e instanceof IllegalArgumentException
               && e.getMessage() != null
               && e.getMessage().endsWith("already exists");
    }
}
crate/crate
server/src/main/java/io/crate/execution/ddl/tables/TableCreator.java
Java
apache-2.0
4,617
package org.pac4j.cas.redirect;

import org.junit.Test;
import org.pac4j.cas.client.CasClient;
import org.pac4j.cas.config.CasConfiguration;
import org.pac4j.cas.config.CasProtocol;
import org.pac4j.core.context.MockWebContext;
import org.pac4j.core.context.session.MockSessionStore;
import org.pac4j.core.exception.http.FoundAction;
import org.pac4j.core.redirect.RedirectionActionBuilder;
import org.pac4j.core.util.TestsConstants;

import static org.junit.Assert.*;

/**
 * Tests {@link CasRedirectionActionBuilder}.
 *
 * @author Jerome LELEU
 * @since 3.7.0
 */
public final class CasRedirectionActionBuilderTest implements TestsConstants {

    // Default CAS protocol: redirect carries the URL-encoded callback as "service".
    @Test
    public void testRedirect() {
        final var builder = newBuilder(new CasConfiguration());
        final var action = builder.getRedirectionAction(MockWebContext.create(), new MockSessionStore()).get();
        assertTrue(action instanceof FoundAction);
        assertEquals(LOGIN_URL + "?service=http%3A%2F%2Fwww.pac4j.org%2Ftest.html%3Fclient_name%3DCasClient",
            ((FoundAction) action).getLocation());
    }

    // A passive request (ATTRIBUTE_PASSIVE) must add gateway=true to the URL.
    @Test
    public void testRedirectGatewayAttribute() {
        final var builder = newBuilder(new CasConfiguration());
        final var context = MockWebContext.create();
        context.setRequestAttribute(RedirectionActionBuilder.ATTRIBUTE_PASSIVE, true);
        final var action = builder.getRedirectionAction(context, new MockSessionStore()).get();
        assertTrue(action instanceof FoundAction);
        assertTrue(((FoundAction) action).getLocation().contains("gateway=true"));
    }

    // A forced re-authentication (ATTRIBUTE_FORCE_AUTHN) must add renew=true.
    @Test
    public void testRedirectRenewAttribute() {
        final var builder = newBuilder(new CasConfiguration());
        final var context = MockWebContext.create();
        context.setRequestAttribute(RedirectionActionBuilder.ATTRIBUTE_FORCE_AUTHN, true);
        final var action = builder.getRedirectionAction(context, new MockSessionStore()).get();
        assertTrue(action instanceof FoundAction);
        assertTrue(((FoundAction) action).getLocation().contains("renew=true"));
    }

    // A configured method is appended as a "method" query parameter.
    @Test
    public void testRedirectWithMethod() {
        final var config = new CasConfiguration();
        config.setMethod("post");
        final var builder = newBuilder(config);
        final var action = builder.getRedirectionAction(MockWebContext.create(), new MockSessionStore()).get();
        assertTrue(action instanceof FoundAction);
        assertEquals(LOGIN_URL + "?service=http%3A%2F%2Fwww.pac4j.org%2Ftest.html%3Fclient_name%3DCasClient&method=post",
            ((FoundAction) action).getLocation());
    }

    // The SAML protocol uses "TARGET" instead of "service" for the callback.
    @Test
    public void testRedirectForSAMLProtocol() {
        final var config = new CasConfiguration();
        config.setProtocol(CasProtocol.SAML);
        final var builder = newBuilder(config);
        final var action = builder.getRedirectionAction(MockWebContext.create(), new MockSessionStore()).get();
        assertTrue(action instanceof FoundAction);
        assertEquals(LOGIN_URL + "?TARGET=http%3A%2F%2Fwww.pac4j.org%2Ftest.html%3Fclient_name%3DCasClient",
            ((FoundAction) action).getLocation());
    }

    // Builds a fully initialized CasClient around the config and returns its
    // redirection action builder (cast from the generic interface).
    private CasRedirectionActionBuilder newBuilder(final CasConfiguration config) {
        config.setLoginUrl(LOGIN_URL);
        final var client = new CasClient(config);
        client.setCallbackUrl(PAC4J_URL);
        client.init();
        return (CasRedirectionActionBuilder) client.getRedirectionActionBuilder();
    }
}
pac4j/pac4j
pac4j-cas/src/test/java/org/pac4j/cas/redirect/CasRedirectionActionBuilderTest.java
Java
apache-2.0
3,476
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#pragma once

#include <aws/core/client/AWSError.h>
#include <aws/core/client/CoreErrors.h>
#include <aws/amplifyuibuilder/AmplifyUIBuilder_EXPORTS.h>

namespace Aws
{
namespace AmplifyUIBuilder
{
// Error codes for the AmplifyUIBuilder service. The first group mirrors the
// SDK core error codes value-for-value; service-specific codes start after
// SERVICE_EXTENSION_START_RANGE so they cannot collide with core values.
// NOTE(review): this header follows the AWS SDK generated-code layout — it is
// presumably produced by the SDK code generator; confirm before hand-editing.
enum class AmplifyUIBuilderErrors
{
  //From Core//
  //////////////////////////////////////////////////////////////////////////////////////////
  INCOMPLETE_SIGNATURE = 0,
  INTERNAL_FAILURE = 1,
  INVALID_ACTION = 2,
  INVALID_CLIENT_TOKEN_ID = 3,
  INVALID_PARAMETER_COMBINATION = 4,
  INVALID_QUERY_PARAMETER = 5,
  INVALID_PARAMETER_VALUE = 6,
  MISSING_ACTION = 7, // SDK should never allow
  MISSING_AUTHENTICATION_TOKEN = 8, // SDK should never allow
  MISSING_PARAMETER = 9, // SDK should never allow
  OPT_IN_REQUIRED = 10,
  REQUEST_EXPIRED = 11,
  SERVICE_UNAVAILABLE = 12,
  THROTTLING = 13,
  VALIDATION = 14,
  ACCESS_DENIED = 15,
  RESOURCE_NOT_FOUND = 16,
  UNRECOGNIZED_CLIENT = 17,
  MALFORMED_QUERY_STRING = 18,
  SLOW_DOWN = 19,
  REQUEST_TIME_TOO_SKEWED = 20,
  INVALID_SIGNATURE = 21,
  SIGNATURE_DOES_NOT_MATCH = 22,
  INVALID_ACCESS_KEY_ID = 23,
  REQUEST_TIMEOUT = 24,
  NETWORK_CONNECTION = 99,

  UNKNOWN = 100,
  ///////////////////////////////////////////////////////////////////////////////////////////

  INTERNAL_SERVER= static_cast<int>(Aws::Client::CoreErrors::SERVICE_EXTENSION_START_RANGE) + 1,
  INVALID_PARAMETER,
  RESOURCE_CONFLICT,
  SERVICE_QUOTA_EXCEEDED
};

// Service-specific AWSError wrapper; adds typed access to the modeled error
// payload via GetModeledError().
class AWS_AMPLIFYUIBUILDER_API AmplifyUIBuilderError : public Aws::Client::AWSError<AmplifyUIBuilderErrors>
{
public:
  AmplifyUIBuilderError() {}
  // Converting constructors so core AWSError values can flow through APIs
  // that expect the service-specific error type.
  AmplifyUIBuilderError(const Aws::Client::AWSError<Aws::Client::CoreErrors>& rhs) : Aws::Client::AWSError<AmplifyUIBuilderErrors>(rhs) {}
  AmplifyUIBuilderError(Aws::Client::AWSError<Aws::Client::CoreErrors>&& rhs) : Aws::Client::AWSError<AmplifyUIBuilderErrors>(rhs) {}
  AmplifyUIBuilderError(const Aws::Client::AWSError<AmplifyUIBuilderErrors>& rhs) : Aws::Client::AWSError<AmplifyUIBuilderErrors>(rhs) {}
  AmplifyUIBuilderError(Aws::Client::AWSError<AmplifyUIBuilderErrors>&& rhs) : Aws::Client::AWSError<AmplifyUIBuilderErrors>(rhs) {}

  // Declared here, specialized per modeled error type elsewhere in the SDK.
  template <typename T>
  T GetModeledError();
};

namespace AmplifyUIBuilderErrorMapper
{
  // Maps a service error name string to the corresponding error code.
  AWS_AMPLIFYUIBUILDER_API Aws::Client::AWSError<Aws::Client::CoreErrors> GetErrorForName(const char* errorName);
}

} // namespace AmplifyUIBuilder
} // namespace Aws
aws/aws-sdk-cpp
aws-cpp-sdk-amplifyuibuilder/include/aws/amplifyuibuilder/AmplifyUIBuilderErrors.h
C
apache-2.0
2,473
using CodeModel.Builder;
using CodeModel.Dependencies;
using CodeModel.Graphs;

namespace CodeModel.Primitives.Mutators
{
    /// <summary>
    /// Graph mutator that adds the application entry point node to the model;
    /// declares (via [Provide]) that it supplies the EntryPoint resource.
    /// </summary>
    [Provide(Resources.EntryPoint)]
    public class AddApplicationEntryPoint : IGraphMutator
    {
        /// <summary>Adds a fresh <see cref="ApplicationEntryPoint"/> node to the graph.</summary>
        public void Mutate(Graph model)
        {
            model.AddNode(new ApplicationEntryPoint());
        }
    }
}
Novakov/HighLevelCodeAnalysis
src/CodeModel/Primitives/Mutators/AddApplicationEntryPoint.cs
C#
apache-2.0
348
// Sprockets manifest: each `//= require` line below is a build-time directive,
// not an ordinary comment. Order matters — jQuery must load before jquery_ujs.
//= require jquery
//= require jquery_ujs
//= require foundation
//= require turbolinks
//= require_tree .
converge-proximity-network/converge-backend
app/assets/javascripts/application.js
JavaScript
apache-2.0
107
//===--- lib/CodeGen/DebugLocStream.h - DWARF debug_loc stream --*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_CODEGEN_ASMPRINTER_DEBUGLOCSTREAM_H
#define LLVM_LIB_CODEGEN_ASMPRINTER_DEBUGLOCSTREAM_H

#include "ByteStreamer.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"

namespace llvm {

class AsmPrinter;
class DbgVariable;
class DwarfCompileUnit;
class MachineInstr;
class MCSymbol;

/// Byte stream of .debug_loc entries.
///
/// Stores a unified stream of .debug_loc entries.  There's \a List for each
/// variable/inlined-at pair, and an \a Entry for each \a DebugLocEntry.
///
/// FIXME: Do we need all these temp symbols?
/// FIXME: Why not output directly to the output stream?
class DebugLocStream {
public:
  struct List {
    DwarfCompileUnit *CU;
    MCSymbol *Label = nullptr;
    // Index into Entries of this list's first entry; the list's entries run
    // until the next list's EntryOffset (or the end of Entries).
    size_t EntryOffset;
    List(DwarfCompileUnit *CU, size_t EntryOffset)
        : CU(CU), EntryOffset(EntryOffset) {}
  };
  struct Entry {
    const MCSymbol *Begin;
    const MCSymbol *End;
    // Offsets into DWARFBytes/Comments; sizes are implied by the next entry.
    size_t ByteOffset;
    size_t CommentOffset;
  };

private:
  SmallVector<List, 4> Lists;
  SmallVector<Entry, 32> Entries;
  SmallString<256> DWARFBytes;
  std::vector<std::string> Comments;
  MCSymbol *Sym;

  /// Only verbose textual output needs comments.  This will be set to
  /// true for that case, and false otherwise.
  bool GenerateComments;

public:
  DebugLocStream(bool GenerateComments) : GenerateComments(GenerateComments) { }
  size_t getNumLists() const { return Lists.size(); }
  const List &getList(size_t LI) const { return Lists[LI]; }
  ArrayRef<List> getLists() const { return Lists; }
  MCSymbol *getSym() const {
    return Sym;
  }
  void setSym(MCSymbol *Sym) {
    this->Sym = Sym;
  }

  class ListBuilder;
  class EntryBuilder;

private:
  /// Start a new .debug_loc entry list.
  ///
  /// Start a new .debug_loc entry list.  Return the new list's index so it can
  /// be retrieved later via \a getList().
  ///
  /// Until the next call, \a startEntry() will add entries to this list.
  size_t startList(DwarfCompileUnit *CU) {
    size_t LI = Lists.size();
    Lists.emplace_back(CU, Entries.size());
    return LI;
  }

  /// Finalize a .debug_loc entry list.
  ///
  /// If there are no entries in this list, delete it outright.  Otherwise,
  /// create a label with \a Asm.
  ///
  /// \return false iff the list is deleted.
  bool finalizeList(AsmPrinter &Asm);

  /// Start a new .debug_loc entry.
  ///
  /// Until the next call, bytes added to the stream will be added to this
  /// entry.
  void startEntry(const MCSymbol *BeginSym, const MCSymbol *EndSym) {
    assert(&BeginSym->getSection() == &EndSym->getSection() &&
           "debug_loc entries cannot span across multiple sections");
    Entries.push_back({BeginSym, EndSym, DWARFBytes.size(), Comments.size()});
  }

  /// Finalize a .debug_loc entry, deleting if it's empty.
  void finalizeEntry();

public:
  BufferByteStreamer getStreamer() {
    return BufferByteStreamer(DWARFBytes, Comments, GenerateComments);
  }

  ArrayRef<Entry> getEntries(const List &L) const {
    size_t LI = getIndex(L);
    return makeArrayRef(Entries)
        .slice(Lists[LI].EntryOffset, getNumEntries(LI));
  }

  ArrayRef<char> getBytes(const Entry &E) const {
    size_t EI = getIndex(E);
    return makeArrayRef(DWARFBytes.begin(), DWARFBytes.end())
        .slice(Entries[EI].ByteOffset, getNumBytes(EI));
  }
  ArrayRef<std::string> getComments(const Entry &E) const {
    size_t EI = getIndex(E);
    return makeArrayRef(Comments)
        .slice(Entries[EI].CommentOffset, getNumComments(EI));
  }

private:
  // Recover an element's index from its address via pointer arithmetic; the
  // asserts check the reference really points into the owning vector.
  size_t getIndex(const List &L) const {
    assert(&Lists.front() <= &L && &L <= &Lists.back() &&
           "Expected valid list");
    return &L - &Lists.front();
  }
  size_t getIndex(const Entry &E) const {
    assert(&Entries.front() <= &E && &E <= &Entries.back() &&
           "Expected valid entry");
    return &E - &Entries.front();
  }
  // Sizes are derived as "next offset (or container end) minus own offset".
  size_t getNumEntries(size_t LI) const {
    if (LI + 1 == Lists.size())
      return Entries.size() - Lists[LI].EntryOffset;
    return Lists[LI + 1].EntryOffset - Lists[LI].EntryOffset;
  }
  size_t getNumBytes(size_t EI) const {
    if (EI + 1 == Entries.size())
      return DWARFBytes.size() - Entries[EI].ByteOffset;
    return Entries[EI + 1].ByteOffset - Entries[EI].ByteOffset;
  }
  size_t getNumComments(size_t EI) const {
    if (EI + 1 == Entries.size())
      return Comments.size() - Entries[EI].CommentOffset;
    return Entries[EI + 1].CommentOffset - Entries[EI].CommentOffset;
  }
};

/// Builder for DebugLocStream lists.
class DebugLocStream::ListBuilder {
  DebugLocStream &Locs;
  AsmPrinter &Asm;
  DbgVariable &V;
  const MachineInstr &MI;
  size_t ListIndex;
  Optional<uint8_t> TagOffset;

public:
  ListBuilder(DebugLocStream &Locs, DwarfCompileUnit &CU, AsmPrinter &Asm,
              DbgVariable &V, const MachineInstr &MI)
      : Locs(Locs), Asm(Asm), V(V), MI(MI), ListIndex(Locs.startList(&CU)),
        TagOffset(None) {}

  void setTagOffset(uint8_t TO) {
    TagOffset = TO;
  }

  /// Finalize the list.
  ///
  /// If the list is empty, delete it.  Otherwise, finalize it by creating a
  /// temp symbol in \a Asm and setting up the \a DbgVariable.
  ~ListBuilder();

  DebugLocStream &getLocs() { return Locs; }
};

/// Builder for DebugLocStream entries.
class DebugLocStream::EntryBuilder {
  DebugLocStream &Locs;

public:
  EntryBuilder(ListBuilder &List, const MCSymbol *Begin, const MCSymbol *End)
      : Locs(List.getLocs()) {
    Locs.startEntry(Begin, End);
  }

  /// Finalize the entry, deleting it if it's empty.
  ~EntryBuilder() { Locs.finalizeEntry(); }

  BufferByteStreamer getStreamer() { return Locs.getStreamer(); }
};

} // namespace llvm

#endif
google/llvm-propeller
llvm/lib/CodeGen/AsmPrinter/DebugLocStream.h
C
apache-2.0
6,049
/*
 * Copyright (c) 2015 Raytheon BBN Technologies Corp
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.poi.xdgf.usermodel.section.geometry;

import org.apache.poi.POIXMLException;
import org.apache.poi.xdgf.util.ObjectFactory;

import com.microsoft.schemas.office.visio.x2012.main.RowType;

/**
 * Creates {@link GeometryRow} instances from Visio XML geometry rows by
 * dispatching on the row's type name.
 */
public class GeometryRowFactory {

    // Registry mapping a row-type name (as returned by RowType.getT()) to the
    // GeometryRow implementation that parses it; populated once at class load.
    static final ObjectFactory<GeometryRow, RowType> _rowTypes;

    static {
        _rowTypes = new ObjectFactory<>();
        try {
            _rowTypes.put("ArcTo", ArcTo.class, RowType.class);
            _rowTypes.put("Ellipse", Ellipse.class, RowType.class);
            _rowTypes.put("EllipticalArcTo", EllipticalArcTo.class,
                          RowType.class);
            _rowTypes.put("InfiniteLine", InfiniteLine.class, RowType.class);
            _rowTypes.put("LineTo", LineTo.class, RowType.class);
            _rowTypes.put("MoveTo", MoveTo.class, RowType.class);
            _rowTypes.put("NURBSTo", NURBSTo.class, RowType.class);
            _rowTypes.put("PolyLineTo", PolyLineTo.class, RowType.class);
            _rowTypes.put("RelCubBezTo", RelCubBezTo.class, RowType.class);
            _rowTypes.put("RelEllipticalArcTo", RelEllipticalArcTo.class,
                          RowType.class);
            _rowTypes.put("RelLineTo", RelLineTo.class, RowType.class);
            _rowTypes.put("RelMoveTo", RelMoveTo.class, RowType.class);
            _rowTypes.put("RelQuadBezTo", RelQuadBezTo.class, RowType.class);
            _rowTypes.put("SplineKnot", SplineKnot.class, RowType.class);
            _rowTypes.put("SplineStart", SplineStart.class, RowType.class);
        } catch (NoSuchMethodException | SecurityException e) {
            // Registration can only fail on a missing constructor — a
            // programming error, not bad user input.
            throw new POIXMLException("Internal error", e);
        }
    }

    /**
     * Instantiates the {@link GeometryRow} subclass registered for the
     * given row's type name.
     */
    public static GeometryRow load(RowType row) {
        return _rowTypes.load(row.getT(), row);
    }
}
BBN-D/poi-visio
src/main/java/org/apache/poi/xdgf/usermodel/section/geometry/GeometryRowFactory.java
Java
apache-2.0
2,166
/* Copyright (C) 2013-2022 TU Dortmund
 * This file is part of AutomataLib, http://www.automatalib.net/.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.automatalib.util.partitionrefinement;

import net.automatalib.automata.fsa.impl.compact.CompactDFA;
import net.automatalib.automata.transducers.impl.compact.CompactMealy;
import net.automatalib.util.automata.Automata;
import net.automatalib.util.automata.builders.AutomatonBuilders;
import net.automatalib.util.automata.equivalence.NearLinearEquivalenceTest;
import net.automatalib.util.automata.minimizer.paigetarjan.PaigeTarjanMinimization;
import net.automatalib.words.Alphabet;
import net.automatalib.words.impl.Alphabets;
import org.testng.Assert;
import org.testng.annotations.Test;

/**
 * @author frohme
 */
public class PaigeTarjanTest {

    private static final String SINK_OUTPUT = "sink";

    /**
     * Builds binary tree (partial due to 4 input symbols), whose leaves end in a sink and add two unreachable states.
     * s3 and s5 are equivalent, s8, s9 equivalent and unreachable.
     */
    public static CompactMealy<Integer, String> getMealy() {
        final Alphabet<Integer> alphabet = Alphabets.integers(1, 4);

        // @formatter:off
        return AutomatonBuilders.<Integer, String>newMealy(alphabet)
                .withInitial(0)
                .from(0)
                    .on(2).withOutput("2,4").to(1)
                    .on(1).withOutput("1,4").to(2)
                .from(1)
                    .on(1).withOutput("1,3").to(3)
                    .on(4).withOutput("2,3").to(4)
                .from(2)
                    .on(2).withOutput("2,3").to(5)
                    .on(4).withOutput("1,3").to(6)
                .from(3).on(3, 4).withOutput("").to(7)
                .from(4).on(1, 3).withOutput("").to(7)
                .from(5).on(3, 4).withOutput("").to(7)
                .from(6).on(2, 3).withOutput("").to(7)
                .from(8)
                    .on(1).withOutput(SINK_OUTPUT).loop()
                    .on(2).withOutput(SINK_OUTPUT).to(9)
                .from(9).on(1, 2).withOutput(SINK_OUTPUT).loop()
                .create();
        // @formatter:on
    }

    // Partial DFA: only state 0 accepts; states 1 and 2 have no outgoing
    // transitions, state 3 is a total rejecting sink.
    public static CompactDFA<Character> getDFA() {
        final Alphabet<Character> alphabet = Alphabets.characters('a', 'c');

        // @formatter:off
        return AutomatonBuilders.newDFA(alphabet)
                .withInitial(0)
                .from(0)
                    .on('a').to(1)
                    .on('b').to(2)
                    .on('c').to(3)
                .from(3).on('a', 'b', 'c').loop()
                .withAccepting(0)
                .create();
        // @formatter:on
    }

    @Test
    public void testDFAMinimization() {
        final CompactDFA<Character> dfa = getDFA();

        final CompactDFA<Character> stateMinimized = PaigeTarjanMinimization.minimizeDFA(dfa);
        // here, states 1, 2, 3 should fall together because they are all rejecting
        Assert.assertEquals(stateMinimized.size(), 2);

        // equivalence checks full state signature
        Assert.assertFalse(Automata.testEquivalence(dfa, stateMinimized, dfa.getInputAlphabet()));
        // when ignoring undefined transitions, they should be equivalent
        Assert.assertNull(NearLinearEquivalenceTest.findSeparatingWord(dfa,
                                                                       stateMinimized,
                                                                       dfa.getInputAlphabet(),
                                                                       true));

        final CompactDFA<Character> fullMinimized = PaigeTarjanMinimization.minimizeUniversal(dfa,
                                                                                              dfa.getInputAlphabet(),
                                                                                              new CompactDFA.Creator<>(),
                                                                                              AutomatonInitialPartitioning.BY_FULL_SIGNATURE,
                                                                                              null);
        Assert.assertEquals(fullMinimized.size(), 3);

        // BY_FULL_SIGNATURE should yield full-signature-equivalent model
        Assert.assertTrue(Automata.testEquivalence(dfa, fullMinimized, dfa.getInputAlphabet()));
    }

    @Test
    public void testMealyMinimizationByStateProperties() {
        testMealyConfiguration(AutomatonInitialPartitioning.BY_STATE_PROPERTY, null, 1, false);

        // when using false (a non-used state property) we want essentially no collapsing of states, because all states
        // reach an undefined transition at a different point in time
        testMealyConfiguration(AutomatonInitialPartitioning.BY_STATE_PROPERTY, false, 7, true);
    }

    @Test
    public void testMealyMinimizationByTransitionProperties() {
        final CompactMealy<Integer, String> mealy = getMealy();

        // Three sink classifications that should all yield the same 7-state result.
        testMealyConfiguration(AutomatonInitialPartitioning.BY_TRANSITION_PROPERTIES,
                               StateSignature.byTransitionProperties(mealy, mealy.getInputAlphabet(), 6),
                               7,
                               true);

        testMealyConfiguration(AutomatonInitialPartitioning.BY_TRANSITION_PROPERTIES,
                               StateSignature.byTransitionProperties(SINK_OUTPUT, SINK_OUTPUT, SINK_OUTPUT, SINK_OUTPUT),
                               7,
                               true);

        testMealyConfiguration(AutomatonInitialPartitioning.BY_TRANSITION_PROPERTIES, null, 7, true);
    }

    @Test
    public void testMealyMinimizationByFullProperties() {
        final CompactMealy<Integer, String> mealy = getMealy();

        testMealyConfiguration(AutomatonInitialPartitioning.BY_FULL_SIGNATURE,
                               StateSignature.byFullSignature(mealy, mealy.getInputAlphabet(), 6),
                               7,
                               true);

        testMealyConfiguration(AutomatonInitialPartitioning.BY_FULL_SIGNATURE,
                               StateSignature.byFullSignature(null, SINK_OUTPUT, SINK_OUTPUT, SINK_OUTPUT, SINK_OUTPUT),
                               7,
                               true);

        testMealyConfiguration(AutomatonInitialPartitioning.BY_FULL_SIGNATURE, null, 7, true);
    }

    // Minimizes getMealy() with the given initial partitioning and sink
    // classification, then checks the resulting size and whether the result is
    // fully equivalent to the input.
    private void testMealyConfiguration(AutomatonInitialPartitioning ap,
                                        Object sinkClassification,
                                        int expectedSize,
                                        boolean equivalent) {
        final CompactMealy<Integer, String> mealy = getMealy();

        final CompactMealy<Integer, String> minimized = PaigeTarjanMinimization.minimizeUniversal(mealy,
                                                                                                  mealy.getInputAlphabet(),
                                                                                                  new CompactMealy.Creator<>(),
                                                                                                  ap,
                                                                                                  sinkClassification);

        Assert.assertEquals(minimized.size(), expectedSize);
        Assert.assertEquals(Automata.testEquivalence(mealy, minimized, mealy.getInputAlphabet()), equivalent);
    }
}
LearnLib/automatalib
util/src/test/java/net/automatalib/util/partitionrefinement/PaigeTarjanTest.java
Java
apache-2.0
8,662
package com.max.tang.demokiller.utils;

/**
 * Created by zhihuitang on 2017-01-17.
 *
 * NOTE(review): class body is currently empty — presumably a placeholder for a
 * network connection-state monitor (the name suggests it was meant to wrap
 * Android connectivity callbacks); confirm intent before depending on it.
 */
public class ConnectionStateMonitor {
}
zhihuitang/AndroidDemoKiller
app/src/main/java/com/max/tang/demokiller/utils/ConnectionStateMonitor.java
Java
apache-2.0
129
/* * * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://www.orientechnologies.com * */ package com.orientechnologies.orient.core.sql; import java.util.List; import com.orientechnologies.common.util.OPair; import com.orientechnologies.orient.core.index.OIndex; import com.orientechnologies.orient.core.index.OIndexDefinition; /** * @author <a href="mailto:enisher@gmail.com">Artem Orobets</a> */ public class OOrderByOptimizer { boolean canBeUsedByOrderBy(OIndex<?> index, List<OPair<String, String>> orderedFields) { if (orderedFields.isEmpty()) return false; if (!index.supportsOrderedIterations()) return false; final OIndexDefinition definition = index.getDefinition(); final List<String> fields = definition.getFields(); final int endIndex = Math.min(fields.size(), orderedFields.size()); final String firstOrder = orderedFields.get(0).getValue(); for (int i = 0; i < endIndex; i++) { final OPair<String, String> pair = orderedFields.get(i); if (!firstOrder.equals(pair.getValue())) return false; final String orderFieldName = orderedFields.get(i).getKey().toLowerCase(); final String indexFieldName = fields.get(i).toLowerCase(); if (!orderFieldName.equals(indexFieldName)) return false; } return true; } }
DiceHoldingsInc/orientdb
core/src/main/java/com/orientechnologies/orient/core/sql/OOrderByOptimizer.java
Java
apache-2.0
2,016
# OpenConext-eduproxy [![Build Status](https://travis-ci.org/OpenConext/OpenConext-eduproxy.svg)](https://travis-ci.org/OpenConext/OpenConext-eduproxy) [![codecov.io](https://codecov.io/gh/OpenConext/OpenConext-eduproxy/coverage.svg)](https://codecov.io/gh/OpenConext/OpenConext-eduproxy) eduProxy is a SAML Proxy acting as a Identity Provider for all eduGain Service Providers and acting as a ServiceProvider in the OpenConext SAML Federation The Proxy behaviour can be configured in order for the eduProxy to be used as a generic IdP-SP SAML proxy with hooks for authnResponse 'enrichment'. ## [Getting started](#getting-started) ### [System Requirements](#system-requirements) - Java 7 - Maven 3 ### [Building and running](#building-and-running) This project uses Spring Boot and Maven. To run locally, type: ```bash mvn spring-boot:run ``` When developing, it's convenient to just execute the applications main-method, which is in [Application](src/main/java/eduproxy/Application.java). ## [SAML metadata](#saml-metadata) The eduProxy metadata is generated and accessible on [http://localhost:8080/sp/metadata](http://localhost:8080/sp/metadata) and [http://localhost:8080/idp/metadata](http://localhost:8080/idp/metadata). The metadata is cached and refreshed every 24 hours. This can be configured: ```yml proxy: # duration of metadata cache (1 day) validity_duration_metadata_ms: 86400000 ``` The Service Providers allowed to connect to the eduProxy are provided in a Metadata feed configured in ```application.yml```: ```yml serviceproviders: feed: http://mds.edugain.org/ ``` By default - but easily changed / overridden - all Service Providers in the SAML metadata feed are allowed to connect. See [ServiceProviderFeedParser](src/main/java/eduproxy/saml/ServiceProviderFeedParser.java). 
The feed can also be a file url when developing locally:

```yml
serviceproviders:
  feed: classpath:saml/edugain.xml
```

When developing locally or deploying in a test environment eduProxy can be configured to allow any SP to connect by setting `serviceproviders.allow_unknown` to `true`. This is not recommended and the default is `false`.

```yml
serviceproviders:
  allow_unknown: true
```

The metadata of the IdentityProvider (currently we don't allow more than one and assume that a possible WAYF is the responsibility of the actual IdentityProvider proxied by eduProxy) must be provided in the ```application.yml```

```yml
idp:
  # metadata_url: https://engine.surfconext.nl/authentication/idp/metadata
  metadata_url: classpath:saml/eb.idp.metadata.xml
```

## [Testing](#testing)

There are integration tests that spin up a running application and these can also be run inside the IDE.

There is a test SP endpoint that requires authentication against the configured IdP and displays all SAML attributes received: [http://localhost:8080/test](http://localhost:8080/test)

The production SAML flow with an eduProxy is depicted in [this image](src/main/resources/static/images/eduproxy.001.jpeg).

## [Private signing key and public certificate](#signing-keys)

The SAML Spring Security library needs a private RSA key / public certificate pair for the eduProxy IdP / SP which can be generated.

```bash
openssl req -subj '/O=Organization, CN=EduProxy/' -newkey rsa:2048 -new -x509 -days 3652 -nodes -out eduproxy.crt -keyout eduproxy.pem
```

The Java KeyStore expects a pkcs8 DER format for RSA private keys so we have to re-format that key:

```bash
openssl pkcs8 -nocrypt -in eduproxy.pem -topk8 -out eduproxy.der
```

Remove the whitespace, heading and footer from the eduproxy.crt and eduproxy.der:

```bash
cat eduproxy.der |head -n -1 |tail -n +2 | tr -d '\n'; echo
cat eduproxy.crt |head -n -1 |tail -n +2 | tr -d '\n'; echo
```

Above commands work on Linux distributions.
On mac you can issue the same command with `ghead` after you install `coreutils`: ```bash brew install coreutils cat eduproxy.der |ghead -n -1 |tail -n +2 | tr -d '\n'; echo cat eduproxy.crt |ghead -n -1 |tail -n +2 | tr -d '\n'; echo ``` Add the eduproxy key pair to the application.yml file: ```yml # eduProxy proxy: private_key: ${output from cleaning the der file} certificate: ${output from cleaning the crt file} ``` ## [Deployment](#deployment) The eduProxy application has documented [properties](src/main/resources/application.yml) packaged inside the jar. When deploying to a non-local environment ensure you have application.yml properties outside of the packaged jar to override the eduProxy configuration.
OpenConext/OpenConext-eduproxy
README.md
Markdown
apache-2.0
4,540
// <auto-generated> // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v8/enums/user_list_string_rule_item_operator.proto // </auto-generated> #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace Google.Ads.GoogleAds.V8.Enums { /// <summary>Holder for reflection information generated from google/ads/googleads/v8/enums/user_list_string_rule_item_operator.proto</summary> public static partial class UserListStringRuleItemOperatorReflection { #region Descriptor /// <summary>File descriptor for google/ads/googleads/v8/enums/user_list_string_rule_item_operator.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static UserListStringRuleItemOperatorReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "Ckdnb29nbGUvYWRzL2dvb2dsZWFkcy92OC9lbnVtcy91c2VyX2xpc3Rfc3Ry", "aW5nX3J1bGVfaXRlbV9vcGVyYXRvci5wcm90bxIdZ29vZ2xlLmFkcy5nb29n", "bGVhZHMudjguZW51bXMaHGdvb2dsZS9hcGkvYW5ub3RhdGlvbnMucHJvdG8i", "6QEKIlVzZXJMaXN0U3RyaW5nUnVsZUl0ZW1PcGVyYXRvckVudW0iwgEKHlVz", "ZXJMaXN0U3RyaW5nUnVsZUl0ZW1PcGVyYXRvchIPCgtVTlNQRUNJRklFRBAA", "EgsKB1VOS05PV04QARIMCghDT05UQUlOUxACEgoKBkVRVUFMUxADEg8KC1NU", "QVJUU19XSVRIEAQSDQoJRU5EU19XSVRIEAUSDgoKTk9UX0VRVUFMUxAGEhAK", "DE5PVF9DT05UQUlOUxAHEhMKD05PVF9TVEFSVFNfV0lUSBAIEhEKDU5PVF9F", "TkRTX1dJVEgQCUL4AQohY29tLmdvb2dsZS5hZHMuZ29vZ2xlYWRzLnY4LmVu", "dW1zQiNVc2VyTGlzdFN0cmluZ1J1bGVJdGVtT3BlcmF0b3JQcm90b1ABWkJn", "b29nbGUuZ29sYW5nLm9yZy9nZW5wcm90by9nb29nbGVhcGlzL2Fkcy9nb29n", "bGVhZHMvdjgvZW51bXM7ZW51bXOiAgNHQUGqAh1Hb29nbGUuQWRzLkdvb2ds", "ZUFkcy5WOC5FbnVtc8oCHUdvb2dsZVxBZHNcR29vZ2xlQWRzXFY4XEVudW1z", "6gIhR29vZ2xlOjpBZHM6Okdvb2dsZUFkczo6Vjg6OkVudW1zYgZwcm90bzM=")); descriptor = 
pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::Google.Api.AnnotationsReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Ads.GoogleAds.V8.Enums.UserListStringRuleItemOperatorEnum), global::Google.Ads.GoogleAds.V8.Enums.UserListStringRuleItemOperatorEnum.Parser, null, null, new[]{ typeof(global::Google.Ads.GoogleAds.V8.Enums.UserListStringRuleItemOperatorEnum.Types.UserListStringRuleItemOperator) }, null, null) })); } #endregion } #region Messages /// <summary> /// Supported rule operator for string type. /// </summary> public sealed partial class UserListStringRuleItemOperatorEnum : pb::IMessage<UserListStringRuleItemOperatorEnum> #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE , pb::IBufferMessage #endif { private static readonly pb::MessageParser<UserListStringRuleItemOperatorEnum> _parser = new pb::MessageParser<UserListStringRuleItemOperatorEnum>(() => new UserListStringRuleItemOperatorEnum()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pb::MessageParser<UserListStringRuleItemOperatorEnum> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pbr::MessageDescriptor Descriptor { get { return global::Google.Ads.GoogleAds.V8.Enums.UserListStringRuleItemOperatorReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public UserListStringRuleItemOperatorEnum() { 
OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public UserListStringRuleItemOperatorEnum(UserListStringRuleItemOperatorEnum other) : this() { _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public UserListStringRuleItemOperatorEnum Clone() { return new UserListStringRuleItemOperatorEnum(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override bool Equals(object other) { return Equals(other as UserListStringRuleItemOperatorEnum); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public bool Equals(UserListStringRuleItemOperatorEnum other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override int GetHashCode() { int hash = 1; if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void WriteTo(pb::CodedOutputStream output) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE output.WriteRawMessage(this); #else if (_unknownFields != null) { _unknownFields.WriteTo(output); } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) { if (_unknownFields != null) { _unknownFields.WriteTo(ref output); } } #endif [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int CalculateSize() { int size = 0; if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(UserListStringRuleItemOperatorEnum other) { if (other == null) { return; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(pb::CodedInputStream input) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE input.ReadRawMessage(this); #else uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; } } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input); break; } } } #endif #region Nested types /// <summary>Container for nested types declared in the UserListStringRuleItemOperatorEnum message type.</summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static partial class Types { /// <summary> /// Enum describing 
possible user list string rule item operators. /// </summary> public enum UserListStringRuleItemOperator { /// <summary> /// Not specified. /// </summary> [pbr::OriginalName("UNSPECIFIED")] Unspecified = 0, /// <summary> /// Used for return value only. Represents value unknown in this version. /// </summary> [pbr::OriginalName("UNKNOWN")] Unknown = 1, /// <summary> /// Contains. /// </summary> [pbr::OriginalName("CONTAINS")] Contains = 2, /// <summary> /// Equals. /// </summary> [pbr::OriginalName("EQUALS")] Equals = 3, /// <summary> /// Starts with. /// </summary> [pbr::OriginalName("STARTS_WITH")] StartsWith = 4, /// <summary> /// Ends with. /// </summary> [pbr::OriginalName("ENDS_WITH")] EndsWith = 5, /// <summary> /// Not equals. /// </summary> [pbr::OriginalName("NOT_EQUALS")] NotEquals = 6, /// <summary> /// Not contains. /// </summary> [pbr::OriginalName("NOT_CONTAINS")] NotContains = 7, /// <summary> /// Not starts with. /// </summary> [pbr::OriginalName("NOT_STARTS_WITH")] NotStartsWith = 8, /// <summary> /// Not ends with. /// </summary> [pbr::OriginalName("NOT_ENDS_WITH")] NotEndsWith = 9, } } #endregion } #endregion } #endregion Designer generated code
googleads/google-ads-dotnet
src/V8/Types/UserListStringRuleItemOperator.g.cs
C#
apache-2.0
10,541
# Gymnosporia ellenbeckii Loes. SPECIES #### Status ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Celastrales/Celastraceae/Gymnosporia/Gymnosporia ellenbeckii/README.md
Markdown
apache-2.0
179
/**
 * Plain data holder describing a command-line flag for the GUI.
 * Field meanings are inferred from the names — sFlag/lFlag presumably the
 * short and long flag forms; confirm against the consuming components.
 */
export class Flag {
    public sFlag: string;
    public lFlag: string;
    public sDescription: string;
    public type: string;
    public guiGroup: string;

    constructor(sFlag: string, lFlag: string, sDescription: string, type: string, guiGroup: string) {
        this.sFlag = sFlag;
        this.lFlag = lFlag;
        this.sDescription = sDescription;
        this.type = type;
        this.guiGroup = guiGroup;
    }
}
bosterholz/bibigrid
bibigrid-gui/src/main/resources/public/app/shared/flag.ts
TypeScript
apache-2.0
205
package com.alibaba.fastjson.serializer;

import java.awt.Color;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.Collections;
import java.util.Set;

/**
 * Serializes {@link java.awt.Color} values as JSON objects of the form
 * {@code {"r":..,"g":..,"b":..}}, with an optional {@code "alpha"} member and,
 * when {@link SerializerFeature#WriteClassName} is enabled, a leading
 * {@code "@type"} member carrying the class name.
 */
public class ColorSerializer implements AutowiredObjectSerializer {

    public final static ColorSerializer instance = new ColorSerializer();

    public void write(JSONSerializer serializer, Object object, Object fieldName, Type fieldType) throws IOException {
        final SerializeWriter writer = serializer.getWriter();
        final Color value = (Color) object;

        if (value == null) {
            writer.writeNull();
            return;
        }

        // Separator preceding the first color member: '{' normally, ',' once
        // the "@type" member has already opened the object.
        char separator = '{';
        if (writer.isEnabled(SerializerFeature.WriteClassName)) {
            writer.write('{');
            writer.writeFieldName("@type");
            writer.writeString(Color.class.getName());
            separator = ',';
        }

        writer.writeFieldValue(separator, "r", value.getRed());
        writer.writeFieldValue(',', "g", value.getGreen());
        writer.writeFieldValue(',', "b", value.getBlue());

        // Alpha is emitted only when positive (matches existing wire format).
        if (value.getAlpha() > 0) {
            writer.writeFieldValue(',', "alpha", value.getAlpha());
        }

        writer.write('}');
    }

    public Set<Type> getAutowiredFor() {
        return Collections.<Type>singleton(Color.class);
    }
}
weghst/typhon
fastjson/src/main/java/com/alibaba/fastjson/serializer/ColorSerializer.java
Java
apache-2.0
1,280
# Boea prolixa C.B. Clarke SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Gesneriaceae/Paraboea/Paraboea prolixa/ Syn. Boea prolixa/README.md
Markdown
apache-2.0
181
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Discovery tests: the core checks and subcommands must be registered."""

from turnstile.checks import get_checks
from turnstile.manager import get_commands

# Names every turnstile installation is expected to expose.
CORE_COMMIT_MSG_CHECKS = ['branch_pattern', 'branch_release', 'branch_type', 'protect_master', 'specification']
CORE_SUBCOMMANDS = ['config', 'install', 'remove', 'specification', 'upgrade', 'version']


def test_checks():
    """Each core commit-msg check is discoverable via get_checks."""
    registered = dict(get_checks('commit_msg'))
    for name in CORE_COMMIT_MSG_CHECKS:
        assert name in registered


def test_subcommands():
    """Each core subcommand is discoverable via get_commands."""
    registered = dict(get_commands())
    for name in CORE_SUBCOMMANDS:
        assert name in registered
zalando/turnstile
tests/test_discovery.py
Python
apache-2.0
636
<?php
// Dashboard controller for the coin dashboard application.
// Serves the dashboard view and exposes JSON endpoints that proxy
// third-party block-explorer / mining-pool APIs for Litecoin and Dogecoin.
class HomeController extends BaseController {

    // Renders the dashboard, or prints a configuration hint when the app is
    // unconfigured (an empty LTC address is the "not configured" sentinel).
    public function dash() {
        if(Config::get('coins.ltc.address')===''){
            echo 'The application needs configuration. Environment: '.App::environment();
        }else{
            return View::make('dashboard');
        }
    }

    // Litecoin JSON endpoint; $task selects the sub-resource.
    // NOTE(review): unknown $task values fall through and return null — confirm
    // that an empty response is the intended behaviour for bad routes.
    public function litecoin($task){
        if($task==='getbalance'){
            // Balance is derived from block-explorer totals: received - sent.
            $data['received'] = (float) Curl::get('http://explorer.litecoin.net/chain/Litecoin/q/getreceivedbyaddress/'.Config::get('coins.ltc.address'));
            $data['sent'] = (float) Curl::get('http://explorer.litecoin.net/chain/Litecoin/q/getsentbyaddress/'.Config::get('coins.ltc.address'));
            $data['balance'] = $data['received'] - $data['sent'];
            return Response::json($data);
        }else if($task==='getworkers'){
            $data = Curl::get_json(Config::get('coins.ltc.api_address').'&action=getuserworkers&api_key='.Config::get('coins.ltc.api_key').'&id='.Config::get('coins.ltc.api_id'));
            return Response::json($data->{'getuserworkers'});
        }else if($task==='getstatus'){
            // Combines user status and pool status from the pool API.
            $status = Curl::get_json(Config::get('coins.ltc.api_address').'&action=getuserstatus&api_key='.Config::get('coins.ltc.api_key').'&id='.Config::get('coins.ltc.api_id'));
            $data['status'] = $status->{'getuserstatus'};
            $status = Curl::get_json(Config::get('coins.ltc.api_address').'&action=getpoolstatus&api_key='.Config::get('coins.ltc.api_key').'&id='.Config::get('coins.ltc.api_id'));
            $data['pool'] = $status->{'getpoolstatus'};
            return Response::json($data);
        }
    }

    // Dogecoin JSON endpoint; mirrors litecoin() but this pool API wraps
    // payloads in a 'data' member, hence the extra ->{'data'} accessors.
    public function dogecoin($task){
        if($task=='getbalance'){
            $data['balance'] = (float) Curl::get('http://dogechain.info/chain/Dogecoin/q/addressbalance/'.Config::get('coins.doge.address'));
            return Response::json($data);
        }else if($task==='getworkers'){
            $data = Curl::get_json(Config::get('coins.doge.api_address').'&action=getuserworkers&api_key='.Config::get('coins.doge.api_key').'&id='.Config::get('coins.doge.api_id'));
            return Response::json($data->{'getuserworkers'}->{'data'});
        }else if($task=='getstatus'){
            // Gathers user status, confirmed/unconfirmed balance and pool
            // status into one JSON payload (three separate API calls).
            $status = Curl::get_json(Config::get('coins.doge.api_address').'&action=getuserstatus&api_key='.Config::get('coins.doge.api_key').'&id='.Config::get('coins.doge.api_id'));
            $data['status'] = $status->{'getuserstatus'}->{'data'};
            $balance = Curl::get_json(Config::get('coins.doge.api_address').'&action=getuserbalance&api_key='.Config::get('coins.doge.api_key').'&id='.Config::get('coins.doge.api_id'));
            $data['balance']['confirmed'] = $balance->{'getuserbalance'}->{'data'}->{'confirmed'};
            $data['balance']['unconfirmed'] = $balance->{'getuserbalance'}->{'data'}->{'unconfirmed'};
            $status = Curl::get_json(Config::get('coins.doge.api_address').'&action=getpoolstatus&api_key='.Config::get('coins.doge.api_key').'&id='.Config::get('coins.doge.api_id'));
            $data['pool'] = $status->{'getpoolstatus'}->{'data'};
            return Response::json($data);
        }
    }

    // Aggregates crypto trading-pair prices plus USD/AUD FX rates into one
    // JSON array for the dashboard ticker.
    public function market_rates(){
        // $data['ltc_usd'] = file_get_contents('http://dogechain.info/chain/Dogecoin/q/addressbalance/DDpYPkv1bUMXxfp57Fb8N7Ds6BVmyzhtjn');
        $post = array("pairs" => "ltc_usd,ltc_btc,doge_btc,btc_usd");
        // fetch data
        $curl = curl_init();
        curl_setopt($curl, CURLOPT_URL, "http://www.cryptocoincharts.info/v2/api/tradingPairs");
        curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($curl, CURLOPT_POST, true);
        curl_setopt($curl, CURLOPT_POSTFIELDS, $post);
        $rawData = curl_exec($curl);
        curl_close($curl);
        $data = json_decode($rawData);

        //usd_aud
        $yqdata = json_decode(file_get_contents('http://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.xchange%20where%20pair%20in%20(%22USDAUD%22)&format=json&env=store%3A%2F%2Fdatatables.org%2Falltableswithkeys&callback='), true);
        array_push($data, array(
            'id'=>'usd/aud',
            'price'=>$yqdata['query']['results']['rate']['Rate']
        ));
        //aud_usd
        $yqdata = json_decode(file_get_contents('http://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.xchange%20where%20pair%20in%20(%22AUDUSD%22)&format=json&env=store%3A%2F%2Fdatatables.org%2Falltableswithkeys&callback='), true);
        array_push($data, array(
            'id'=>'aud/usd',
            'price'=>$yqdata['query']['results']['rate']['Rate']
        ));
        return Response::json($data);
    }
}
minhd/coin_dashboard
app/controllers/HomeController.php
PHP
apache-2.0
4,236
/*
 * Forwarding shim: re-exports the vendored libwebp dsp.h under a stable
 * include path.  When LIBWEBP_NO_SRC is defined the vendored header is
 * skipped entirely — presumably so a system libwebp can be used instead;
 * confirm against the build configuration.
 */
#ifndef LIBWEBP_NO_SRC
#include "../../libwebp_src/src/dsp/dsp.h"
#endif
tischda/hugo-search
vendor/github.com/bep/gowebp/internal/libwebp/dsp.h
C
apache-2.0
73
# fireeye-miner

MineMeld Miner for FireEye's urllist implemented as an extension.

## How it works

This simple Miner periodically checks the FireEye urllist.txt (also known as the FireEye/Bluecoat Integration) for new malicious and/or callback URLs and extracts those.

## PreConfiguration using a single FireEye NX

If there is only a standalone FireEye NX, open the FireEye CLI and enter the following commands:

```
FireEye-NX > ena
FireEye-NX # conf t
FireEye-NX(config) # swg scan enable
FireEye-NX(config) # swg scan period callback-url past 168 hours
FireEye-NX(config) # swg scan period malicious-url past 24 hours
FireEye-NX(config) # wr mem
Saving configuration file ... Done!
FireEye-NX(config) #
```

After SWG scan is enabled check if `http://<FQDN of your FireEye NX>/urllist.txt` is available.

## PreConfiguration using FireEye CMS

In case there are multiple FireEye NX appliances: enable SWG scan as explained above on every FireEye NX and then perform the same steps on the CMS as well.

```
FireEye-CMS > ena
FireEye-CMS # conf t
FireEye-CMS(config) # swg scan enable
FireEye-CMS(config) # wr mem
Saving configuration file ... Done!
FireEye-CMS(config) #
```

After SWG scan is enabled check if `http://<FQDN of your FireEye CMS>/urllist.txt` is available.

## Installation

You can install this extension directly from the git repo.

1. Logon to your MineMeld installation and browse to Setup -> Extension.
2. Click add a 'git' extension.
3. Copy & Paste the Repository URL and click on 'Retrieve'.
4. Choose "Master" and click "Install"
5. Enable the Extension
6. Browse to config and switch to prototypes
7. Search for fireeye and open the prototypes
8. Click "New" on the top right
9. Name your prototype and modify fireeye_fqdn with the FQDN of your FireEye NX or CMS appliance
10. Browse back to Config and click on the "eye" symbol on the left bottom of the miner list
11. A "+" Sign appears on the right. Use it to create 3 miners.
12. First the "Miner".
Choose a name like "myfireeye-miner" and select your prototype created in step 9, then click OK
13. Create the second miner, the processor. Choose a name like "myfireeye-processor" and pick "stdlib.aggregatorURL" as Prototype
14. Now the last miner, the "Output". Choose a name like "myfireeye-output" and pick e.g. "stdlib.feedHCGreen" as Prototype
15. Click "Commit" to save your work.

To confirm that your new miner works, browse to Nodes and search for "fireeye", open your "Miner" created in step 12 and see if indicators show up in the log
edfauler/fireeye-miner
README.md
Markdown
apache-2.0
2,517
# Atelosaccharomyces catanei C.W. Dodge SPECIES #### Status ACCEPTED #### According to Index Fungorum #### Published in Medical mycology. Fungous diseases of men and other mammals 343 (1935) #### Original name Atelosaccharomyces catanei C.W. Dodge ### Remarks null
mdoering/backbone
life/Fungi/Basidiomycota/Tremellomycetes/Tremellales/Tremellaceae/Atelosaccharomyces/Atelosaccharomyces catanei/README.md
Markdown
apache-2.0
269
-- Adds audit columns (creation timestamp and creating user) to the
-- COI_DISCLOSURE_NOTEPAD table.  Statements are separated with a custom
-- '/' delimiter, per the surrounding migration scripts' convention.
DELIMITER /
ALTER TABLE COI_DISCLOSURE_NOTEPAD ADD (CREATE_TIMESTAMP DATE )
/
ALTER TABLE COI_DISCLOSURE_NOTEPAD ADD (CREATE_USER VARCHAR(60) )
/
DELIMITER ;
blackcathacker/kc.preclean
coeus-db/coeus-db-sql/src/main/resources/org/kuali/coeus/coeus-sql/current_mysql/5.1.0/tables/KC_TBL_COI_DISCLOSURE_NOTEPAD.sql
SQL
apache-2.0
161
/* * Copyright 2013-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.aws.context.annotation; import org.springframework.context.annotation.Condition; import org.springframework.context.annotation.ConditionContext; import org.springframework.core.annotation.AnnotationUtils; import org.springframework.core.type.AnnotatedTypeMetadata; import org.springframework.util.ClassUtils; import org.springframework.util.MultiValueMap; /** * @author Agim Emruli */ public class OnClassCondition implements Condition { @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { MultiValueMap<String, Object> attributes = metadata .getAllAnnotationAttributes(ConditionalOnClass.class.getName(), true); String className = String.valueOf(attributes.get(AnnotationUtils.VALUE).get(0)); return ClassUtils.isPresent(className, context.getClassLoader()); } }
spring-cloud/spring-cloud-aws
spring-cloud-aws-context/src/main/java/org/springframework/cloud/aws/context/annotation/OnClassCondition.java
Java
apache-2.0
1,481
# [Atata Samples](https://github.com/atata-framework/atata-samples) / JSON Configuration: Multi-Environment [![Download sources](https://img.shields.io/badge/Download-sources-brightgreen.svg)](https://github.com/atata-framework/atata-samples/raw/master/_archives/JsonConfiguration.MultiEnvironment.zip) Demonstrates the way to support multiple environments using JSON configuration files. *[Download sources](https://github.com/atata-framework/atata-samples/raw/master/_archives/JsonConfiguration.MultiEnvironment.zip), run tests, check results and experiment with [Atata Framework](https://atata.io).*
atata-framework/atata-samples
JsonConfiguration.MultiEnvironment/README.md
Markdown
apache-2.0
605
//////////////////////////////////////////////////////////////////////////////// /// DISCLAIMER /// /// Copyright 2014-2016 ArangoDB GmbH, Cologne, Germany /// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and /// limitations under the License. /// /// Copyright holder is ArangoDB GmbH, Cologne, Germany /// /// @author Jan Steemann //////////////////////////////////////////////////////////////////////////////// #include "RocksDBV8Functions.h" #include "Aql/Functions.h" #include "Basics/Exceptions.h" #include "Basics/Result.h" #include "Cluster/ServerState.h" #include "RocksDBEngine/RocksDBCollection.h" #include "RocksDBEngine/RocksDBCommon.h" #include "RocksDBEngine/RocksDBEngine.h" #include "StorageEngine/EngineSelectorFeature.h" #include "V8/v8-conv.h" #include "V8/v8-globals.h" #include "V8/v8-utils.h" #include "V8/v8-vpack.h" #include "V8Server/v8-externals.h" #include "VocBase/LogicalCollection.h" #include <v8.h> using namespace arangodb; /// flush the WAL static void JS_FlushWal(v8::FunctionCallbackInfo<v8::Value> const& args) { TRI_V8_TRY_CATCH_BEGIN(isolate); v8::HandleScope scope(isolate); bool waitForSync = false; bool waitForCollector = false; bool writeShutdownFile = false; if (args.Length() > 0) { if (args[0]->IsObject()) { v8::Handle<v8::Object> obj = args[0]->ToObject(); if (obj->Has(TRI_V8_ASCII_STRING(isolate, "waitForSync"))) { waitForSync = TRI_ObjectToBoolean(obj->Get(TRI_V8_ASCII_STRING(isolate, "waitForSync"))); } if 
(obj->Has(TRI_V8_ASCII_STRING(isolate, "waitForCollector"))) { waitForCollector = TRI_ObjectToBoolean( obj->Get(TRI_V8_ASCII_STRING(isolate, "waitForCollector"))); } if (obj->Has(TRI_V8_ASCII_STRING(isolate, "writeShutdownFile"))) { writeShutdownFile = TRI_ObjectToBoolean( obj->Get(TRI_V8_ASCII_STRING(isolate, "writeShutdownFile"))); } } else { waitForSync = TRI_ObjectToBoolean(args[0]); if (args.Length() > 1) { waitForCollector = TRI_ObjectToBoolean(args[1]); if (args.Length() > 2) { writeShutdownFile = TRI_ObjectToBoolean(args[2]); } } } } arangodb::Result ret = static_cast<RocksDBEngine*>(EngineSelectorFeature::ENGINE)->syncWal( waitForSync, waitForCollector, writeShutdownFile); if (!ret.ok()) { THROW_ARANGO_EXCEPTION_MESSAGE(ret.errorNumber(), ret.errorMessage()); } TRI_V8_RETURN_TRUE(); TRI_V8_TRY_CATCH_END } /// this is just a stub static void JS_WaitCollectorWal( v8::FunctionCallbackInfo<v8::Value> const& args) { TRI_V8_TRY_CATCH_BEGIN(isolate); v8::HandleScope scope(isolate); if (ServerState::instance()->isCoordinator()) { TRI_V8_THROW_EXCEPTION(TRI_ERROR_NOT_IMPLEMENTED); } // this is just a stub TRI_V8_RETURN_TRUE(); TRI_V8_TRY_CATCH_END } /// this is just a stub static void JS_TransactionsWal( v8::FunctionCallbackInfo<v8::Value> const& args) { TRI_V8_TRY_CATCH_BEGIN(isolate); v8::HandleScope scope(isolate); if (ServerState::instance()->isCoordinator()) { TRI_V8_THROW_EXCEPTION(TRI_ERROR_NOT_IMPLEMENTED); } // this is just a stub TRI_V8_RETURN_TRUE(); TRI_V8_TRY_CATCH_END } /// this is just a stub static void JS_PropertiesWal(v8::FunctionCallbackInfo<v8::Value> const& args) { TRI_V8_TRY_CATCH_BEGIN(isolate); v8::HandleScope scope(isolate); if (ServerState::instance()->isCoordinator()) { TRI_V8_THROW_EXCEPTION(TRI_ERROR_NOT_IMPLEMENTED); } // this is just a stub TRI_V8_RETURN_TRUE(); TRI_V8_TRY_CATCH_END } static void JS_RecalculateCounts( v8::FunctionCallbackInfo<v8::Value> const& args) { TRI_V8_TRY_CATCH_BEGIN(isolate); v8::HandleScope scope(isolate); 
arangodb::LogicalCollection* collection = TRI_UnwrapClass<arangodb::LogicalCollection>(args.Holder(), WRP_VOCBASE_COL_TYPE); if (collection == nullptr) { TRI_V8_THROW_EXCEPTION_INTERNAL("cannot extract collection"); } auto physical = toRocksDBCollection(collection); v8::Handle<v8::Value> result = v8::Number::New( isolate, static_cast<double>(physical->recalculateCounts())); TRI_V8_RETURN(result); TRI_V8_TRY_CATCH_END } static void JS_CompactCollection( v8::FunctionCallbackInfo<v8::Value> const& args) { TRI_V8_TRY_CATCH_BEGIN(isolate); v8::HandleScope scope(isolate); arangodb::LogicalCollection* collection = TRI_UnwrapClass<arangodb::LogicalCollection>(args.Holder(), WRP_VOCBASE_COL_TYPE); if (collection == nullptr) { TRI_V8_THROW_EXCEPTION_INTERNAL("cannot extract collection"); } RocksDBCollection* physical = toRocksDBCollection(collection); physical->compact(); TRI_V8_RETURN_UNDEFINED(); TRI_V8_TRY_CATCH_END } static void JS_EstimateCollectionSize( v8::FunctionCallbackInfo<v8::Value> const& args) { TRI_V8_TRY_CATCH_BEGIN(isolate); v8::HandleScope scope(isolate); arangodb::LogicalCollection* collection = TRI_UnwrapClass<arangodb::LogicalCollection>(args.Holder(), WRP_VOCBASE_COL_TYPE); if (collection == nullptr) { TRI_V8_THROW_EXCEPTION_INTERNAL("cannot extract collection"); } RocksDBCollection* physical = toRocksDBCollection(collection); VPackBuilder builder; physical->estimateSize(builder); v8::Handle<v8::Value> result = TRI_VPackToV8(isolate, builder.slice()); TRI_V8_RETURN(result); TRI_V8_TRY_CATCH_END } void RocksDBV8Functions::registerResources() { ISOLATE; v8::HandleScope scope(isolate); TRI_GET_GLOBALS(); // patch ArangoCollection object v8::Handle<v8::ObjectTemplate> rt = v8::Handle<v8::ObjectTemplate>::New(isolate, v8g->VocbaseColTempl); TRI_ASSERT(!rt.IsEmpty()); TRI_AddMethodVocbase(isolate, rt, TRI_V8_ASCII_STRING(isolate, "recalculateCount"), JS_RecalculateCounts, true); TRI_AddMethodVocbase(isolate, rt, TRI_V8_ASCII_STRING(isolate, "compact"), 
JS_CompactCollection); TRI_AddMethodVocbase(isolate, rt, TRI_V8_ASCII_STRING(isolate, "estimatedSize"), JS_EstimateCollectionSize); // add global WAL handling functions TRI_AddGlobalFunctionVocbase(isolate, TRI_V8_ASCII_STRING(isolate, "WAL_FLUSH"), JS_FlushWal, true); TRI_AddGlobalFunctionVocbase(isolate, TRI_V8_ASCII_STRING(isolate, "WAL_WAITCOLLECTOR"), JS_WaitCollectorWal, true); TRI_AddGlobalFunctionVocbase(isolate, TRI_V8_ASCII_STRING(isolate, "WAL_PROPERTIES"), JS_PropertiesWal, true); TRI_AddGlobalFunctionVocbase(isolate, TRI_V8_ASCII_STRING(isolate, "WAL_TRANSACTIONS"), JS_TransactionsWal, true); }
hkernbach/arangodb
arangod/RocksDBEngine/RocksDBV8Functions.cpp
C++
apache-2.0
7,433
package no.haagensoftware.netty.webserver.scriptCache; import java.util.Hashtable; import java.util.List; public class ScriptHash { private static Hashtable<String, ScriptCache> scriptHash = new Hashtable<String, ScriptCache>(); public static ScriptCache updateScriptContents(String htmlFilePath, List<ScriptFile> scriptFiles, String htmlContent, long expires) { ScriptCache cacheFromHash = scriptHash.get(htmlFilePath); if (cacheFromHash != null) { //Remove the old script cache scriptHash.remove(htmlFilePath); } cacheFromHash = new ScriptCache(htmlFilePath, scriptFiles, expires, htmlContent); scriptHash.put(htmlFilePath, cacheFromHash); return cacheFromHash; } public static ScriptCache getScriptCache(String htmlFilePath) { return scriptHash.get(htmlFilePath); } }
joachimhs/Embriak
backend/src/main/java/no/haagensoftware/netty/webserver/scriptCache/ScriptHash.java
Java
apache-2.0
806
package de.slackspace.openkeepass.parser; import java.io.ByteArrayOutputStream; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.text.SimpleDateFormat; import java.util.List; import java.util.TimeZone; import java.util.UUID; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import de.slackspace.openkeepass.crypto.Salsa20; import de.slackspace.openkeepass.domain.Entry; import de.slackspace.openkeepass.domain.Group; import de.slackspace.openkeepass.domain.KeePassFile; import de.slackspace.openkeepass.domain.Times; import de.slackspace.openkeepass.util.ByteUtils; import de.slackspace.openkeepass.xml.KeePassDatabaseXmlParser; public class KeePassDatabaseXmlParserTest { private byte[] protectedStreamKey = ByteUtils.hexStringToByteArray("ec77a2169769734c5d26e5341401f8d7b11052058f8455d314879075d0b7e257"); private static SimpleDateFormat dateFormatter; @BeforeClass public static void init() { // make sure we use UTC time dateFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); dateFormatter.setTimeZone(TimeZone.getTimeZone("GMT")); } @Test public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectMetadata() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); Assert.assertEquals("KeePass", keePassFile.getMeta().getGenerator()); Assert.assertEquals("TestDatabase", keePassFile.getMeta().getDatabaseName()); Assert.assertEquals("Just a sample db", keePassFile.getMeta().getDatabaseDescription()); Assert.assertEquals("2014-11-22 18:59:39", dateFormatter.format(keePassFile.getMeta().getDatabaseNameChanged().getTime())); Assert.assertEquals("2014-11-22 18:59:39", dateFormatter.format(keePassFile.getMeta().getDatabaseDescriptionChanged().getTime())); Assert.assertEquals(365, keePassFile.getMeta().getMaintenanceHistoryDays()); Assert.assertEquals(true, 
keePassFile.getMeta().getRecycleBinEnabled()); Assert.assertEquals(UUID.fromString("00000000-0000-0000-0000-00000000"), keePassFile.getMeta().getRecycleBinUuid()); Assert.assertEquals("2014-11-22 18:58:56", dateFormatter.format(keePassFile.getMeta().getRecycleBinChanged().getTime())); Assert.assertEquals(10, keePassFile.getMeta().getHistoryMaxItems()); Assert.assertEquals(6291456, keePassFile.getMeta().getHistoryMaxSize()); } @Test public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectGroups() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Group> groups = keePassFile.getTopGroups(); Assert.assertNotNull(groups); Assert.assertEquals(6, groups.size()); Assert.assertEquals("General", groups.get(0).getName()); Assert.assertEquals(UUID.fromString("16abcc27-cca3-9544-8012-df4e98d4a3d8"), groups.get(0).getUuid()); Assert.assertEquals("Windows", groups.get(1).getName()); Assert.assertEquals(UUID.fromString("ad7b7b0f-e10c-ff4a-96d6-b80f0788399f"), groups.get(1).getUuid()); Assert.assertEquals("Network", groups.get(2).getName()); Assert.assertEquals(UUID.fromString("0f074068-a9f8-b44c-9716-5539ebfd9405"), groups.get(2).getUuid()); Assert.assertEquals("Internet", groups.get(3).getName()); Assert.assertEquals(UUID.fromString("08e814ac-fb79-3f4e-bbe8-37b2667fdab9"), groups.get(3).getUuid()); Assert.assertEquals("eMail", groups.get(4).getName()); Assert.assertEquals(UUID.fromString("ff159f39-f9c2-ea48-bbea-c361ad947baf"), groups.get(4).getUuid()); Assert.assertEquals("Homebanking", groups.get(5).getName()); Assert.assertEquals(UUID.fromString("45d8eddb-5265-6b4f-84e5-0f449491f0d6"), groups.get(5).getUuid()); } @Test public void whenInputIsValidKeePassXmlShouldParseFileAndReturnCorrectEntries() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Entry> entries = keePassFile.getTopEntries(); Assert.assertNotNull(entries); Assert.assertEquals(2, entries.size()); 
Assert.assertEquals(UUID.fromString("9626dd2d-6f3c-714e-81be-b3d096f2aa30"), entries.get(0).getUuid()); Assert.assertEquals(5, entries.get(0).getProperties().size()); Assert.assertEquals("Sample Entry", entries.get(0).getTitle()); Assert.assertEquals("http://keepass.info/", entries.get(0).getUrl()); Assert.assertEquals("User Name", entries.get(0).getUsername()); Assert.assertEquals("Notes", entries.get(0).getNotes()); Assert.assertEquals("Password", entries.get(0).getPassword()); } @Test public void whenUsingGetEntriesShouldReturnAllEntries() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Entry> entries = keePassFile.getEntries(); Assert.assertEquals(3, entries.size()); } @Test public void whenUsingGetEntriesByTitleExactlyShouldReturnAllEntriesWithGivenTitle() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Entry> entries = keePassFile.getEntriesByTitle("Sample Entry", true); Assert.assertEquals(1, entries.size()); Assert.assertEquals("Sample Entry", entries.get(0).getTitle()); } @Test public void whenUsingGetEntriesByTitleLooselyShouldReturnAllEntriesWithGivenTitle() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Entry> entries = keePassFile.getEntriesByTitle("Sample Entry", false); Assert.assertEquals(2, entries.size()); Assert.assertEquals("Sample Entry", entries.get(0).getTitle()); Assert.assertEquals("Sample Entry #2", entries.get(1).getTitle()); } @Test public void whenUsingGetEntriesByTitleLooselyButNothingMatchesShouldReturnEmptyList() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Entry> entries = keePassFile.getEntriesByTitle("abcdefg", false); Assert.assertEquals(0, entries.size()); } private KeePassFile parseKeePassXml() throws FileNotFoundException { FileInputStream fileInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted.xml"); KeePassFile keePassFile = new 
KeePassDatabaseXmlParser().fromXml(fileInputStream, Salsa20.createInstance(protectedStreamKey)); return keePassFile; } @Test public void whenWritingKeePassFileShouldBeAbleToReadItAgain() throws IOException { // Read decrypted and write again FileInputStream fileInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted.xml"); KeePassDatabaseXmlParser parser = new KeePassDatabaseXmlParser(); KeePassFile keePassFile = parser.fromXml(fileInputStream, Salsa20.createInstance(protectedStreamKey)); ByteArrayOutputStream outputStream = parser.toXml(keePassFile, Salsa20.createInstance(protectedStreamKey)); OutputStream fileOutputStream = new FileOutputStream("target/test-classes/testDatabase_decrypted2.xml"); outputStream.writeTo(fileOutputStream); // Read written file FileInputStream writtenInputStream = new FileInputStream("target/test-classes/testDatabase_decrypted2.xml"); KeePassFile writtenKeePassFile = parser.fromXml(writtenInputStream, Salsa20.createInstance(protectedStreamKey)); Assert.assertEquals("Password", writtenKeePassFile.getEntryByTitle("Sample Entry").getPassword()); } @Test public void whenUsingGetGroupsShouldReturnAllGroups() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Group> groups = keePassFile.getGroups(); Assert.assertEquals(7, groups.size()); } @Test public void whenUsingGetGroupsByNameExactlyShouldReturnGroupsWithGivenName() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Group> groups = keePassFile.getGroupsByName("Windows", true); Assert.assertEquals(1, groups.size()); Assert.assertEquals("Windows", groups.get(0).getName()); } @Test public void whenUsingGetGroupsByNameLooslyShouldReturnGroupsWithGivenName() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); List<Group> groups = keePassFile.getGroupsByName("net", false); Assert.assertEquals(2, groups.size()); Assert.assertEquals("Network", groups.get(0).getName()); 
Assert.assertEquals("Internet", groups.get(1).getName()); } @Test public void whenUsingGetGroupByNameShouldReturnOneGroup() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); Group group = keePassFile.getGroupByName("Internet"); Assert.assertEquals("Internet", group.getName()); } @Test public void whenUsingGetTimesShouldReturnCorrectlyParsedTimes() throws FileNotFoundException { KeePassFile keePassFile = parseKeePassXml(); Group group = keePassFile.getGroupByName("testDatabase"); Times times = group.getTimes(); Assert.assertEquals(false, times.expires()); Assert.assertEquals("2014-11-22 18:58:56", dateFormatter.format(times.getLastModificationTime().getTime())); Assert.assertEquals("2014-11-22 18:58:56", dateFormatter.format(times.getCreationTime().getTime())); Assert.assertEquals("2014-11-22 18:58:13", dateFormatter.format(times.getExpiryTime().getTime())); Assert.assertEquals("2014-11-22 18:59:53", dateFormatter.format(times.getLastAccessTime().getTime())); Assert.assertEquals("2014-11-22 18:58:56", dateFormatter.format(times.getLastModificationTime().getTime())); Assert.assertEquals("2014-11-22 18:58:56", dateFormatter.format(times.getLocationChanged().getTime())); Assert.assertEquals(8, times.getUsageCount()); } }
knowhowlab/openkeepass
src/test/java/de/slackspace/openkeepass/parser/KeePassDatabaseXmlParserTest.java
Java
apache-2.0
9,455
/** * Copyright 2011-2017 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Extension points for DMDL Java emitter. */ package com.asakusafw.dmdl.java.emitter.driver;
cocoatomo/asakusafw
dmdl-project/asakusa-dmdl-java/src/main/java/com/asakusafw/dmdl/java/emitter/driver/package-info.java
Java
apache-2.0
712
// // ======================================================================== // Copyright (c) 1995-2013 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // // You may elect to redistribute this code under either of these licenses. // ======================================================================== // package org.eclipse.jetty.spdy.frames; import java.io.InputStream; import java.nio.ByteBuffer; import java.security.KeyStore; import java.security.cert.Certificate; import org.eclipse.jetty.spdy.StandardByteBufferPool; import org.eclipse.jetty.spdy.StandardCompressionFactory; import org.eclipse.jetty.spdy.api.SPDY; import org.eclipse.jetty.spdy.generator.Generator; import org.eclipse.jetty.spdy.parser.Parser; import org.eclipse.jetty.util.resource.Resource; import org.junit.Assert; import org.junit.Test; public class CredentialGenerateParseTest { @Test public void testGenerateParse() throws Exception { short slot = 1; byte[] proof = new byte[]{0, 1, 2}; Certificate[] temp = loadCertificates(); Certificate[] certificates = new Certificate[temp.length * 2]; System.arraycopy(temp, 0, certificates, 0, temp.length); System.arraycopy(temp, 0, certificates, temp.length, temp.length); CredentialFrame frame1 = new CredentialFrame(SPDY.V3, slot, proof, certificates); Generator generator = new Generator(new StandardByteBufferPool(), new StandardCompressionFactory().newCompressor()); ByteBuffer buffer = generator.control(frame1); Assert.assertNotNull(buffer); TestSPDYParserListener listener = new TestSPDYParserListener(); 
Parser parser = new Parser(new StandardCompressionFactory().newDecompressor()); parser.addListener(listener); parser.parse(buffer); ControlFrame frame2 = listener.getControlFrame(); Assert.assertNotNull(frame2); Assert.assertEquals(ControlFrameType.CREDENTIAL, frame2.getType()); CredentialFrame credential = (CredentialFrame)frame2; Assert.assertEquals(SPDY.V3, credential.getVersion()); Assert.assertEquals(0, credential.getFlags()); Assert.assertEquals(slot, credential.getSlot()); Assert.assertArrayEquals(proof, credential.getProof()); Assert.assertArrayEquals(certificates, credential.getCertificateChain()); } @Test public void testGenerateParseOneByteAtATime() throws Exception { short slot = 1; byte[] proof = new byte[]{0, 1, 2}; Certificate[] certificates = loadCertificates(); CredentialFrame frame1 = new CredentialFrame(SPDY.V3, slot, proof, certificates); Generator generator = new Generator(new StandardByteBufferPool(), new StandardCompressionFactory().newCompressor()); ByteBuffer buffer = generator.control(frame1); Assert.assertNotNull(buffer); TestSPDYParserListener listener = new TestSPDYParserListener(); Parser parser = new Parser(new StandardCompressionFactory().newDecompressor()); parser.addListener(listener); while (buffer.hasRemaining()) parser.parse(ByteBuffer.wrap(new byte[]{buffer.get()})); ControlFrame frame2 = listener.getControlFrame(); Assert.assertNotNull(frame2); Assert.assertEquals(ControlFrameType.CREDENTIAL, frame2.getType()); CredentialFrame credential = (CredentialFrame)frame2; Assert.assertEquals(SPDY.V3, credential.getVersion()); Assert.assertEquals(0, credential.getFlags()); Assert.assertEquals(slot, credential.getSlot()); Assert.assertArrayEquals(proof, credential.getProof()); Assert.assertArrayEquals(certificates, credential.getCertificateChain()); } private Certificate[] loadCertificates() throws Exception { KeyStore keyStore = KeyStore.getInstance("JKS"); InputStream keyStoreStream = 
Resource.newResource("src/test/resources/keystore.jks").getInputStream(); keyStore.load(keyStoreStream, "storepwd".toCharArray()); return keyStore.getCertificateChain("mykey"); } }
whiteley/jetty8
jetty-spdy/spdy-core/src/test/java/org/eclipse/jetty/spdy/frames/CredentialGenerateParseTest.java
Java
apache-2.0
4,598
package com.fernandocs.firebase.quickstart; import android.app.ProgressDialog; import android.support.v7.app.AppCompatActivity; import com.google.firebase.auth.FirebaseAuth; public class BaseActivity extends AppCompatActivity { private ProgressDialog mProgressDialog; public void showProgressDialog() { if (mProgressDialog == null) { mProgressDialog = new ProgressDialog(this); mProgressDialog.setCancelable(false); mProgressDialog.setMessage("Loading..."); } mProgressDialog.show(); } public void hideProgressDialog() { if (mProgressDialog != null && mProgressDialog.isShowing()) { mProgressDialog.dismiss(); } } public String getUid() { return FirebaseAuth.getInstance().getCurrentUser().getUid(); } }
fernandocs/firebase-quickstart-android
QuickStart/app/src/main/java/com/fernandocs/firebase/quickstart/BaseActivity.java
Java
apache-2.0
837
// Copyright 2016 The Oppia Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Factory for creating and mutating instances of frontend * collection rights domain objects. */ import cloneDeep from 'lodash/cloneDeep'; import { downgradeInjectable } from '@angular/upgrade/static'; import { Injectable } from '@angular/core'; export interface CollectionRightsBackendDict { 'collection_id': number; 'can_edit': boolean; 'can_unpublish': boolean; 'is_private': boolean; 'owner_names': string[]; } export class CollectionRights { _collectionId: number; _canEdit: boolean; _canUnpublish: boolean; _isPrivate: boolean; _ownerNames: string[]; constructor(collectionRightsObject: CollectionRightsBackendDict) { this._collectionId = collectionRightsObject.collection_id; this._canEdit = collectionRightsObject.can_edit; this._canUnpublish = collectionRightsObject.can_unpublish; this._isPrivate = collectionRightsObject.is_private; this._ownerNames = collectionRightsObject.owner_names; } getCollectionId(): number { return this._collectionId; } // Returns true if the the user can edit the collection. This property is // immutable. canEdit(): boolean { return this._canEdit; } // Returns true if the user can unpublish the collection. canUnpublish(): boolean { return this._canUnpublish; } // Returns true if the collection is private. isPrivate(): boolean { return this._isPrivate; } // Returns true if the collection is public. 
isPublic(): boolean { return !this._isPrivate; } // Sets isPrivate to false only if the user can edit the corresponding // collection. setPublic(): void { if (this.canEdit()) { this._isPrivate = false; } else { throw new Error('User is not allowed to edit this collection.'); } } // Sets isPrivate to true only if canUnpublish and canEdit are both true. setPrivate(): void { if (this.canEdit() && this.canUnpublish()) { this._isPrivate = true; } else { throw new Error('User is not allowed to unpublish this collection.'); } } // Returns the owner names of the collection. This property is immutable. getOwnerNames(): string[] { return cloneDeep(this._ownerNames); } // Returns the reference to the internal ownerNames array; this function is // only meant to be used for Angular bindings and should never be used in // code. Please use getOwnerNames() and related functions, instead. Please // also be aware this exposes internal state of the collection rights domain // object, so changes to the array itself may internally break the domain // object. getBindableOwnerNames(): string[] { return this._ownerNames; } // Reassigns all values within this collection to match the existing // collection rights. This is performed as a deep copy such that none of the // internal, bindable objects are changed within this collection rights. // Note that the collection nodes within this collection will be completely // redefined as copies from the specified collection rights. copyFromCollectionRights(otherCollectionRights: CollectionRights): void { this._collectionId = otherCollectionRights.getCollectionId(); this._canEdit = otherCollectionRights.canEdit(); this._isPrivate = otherCollectionRights.isPrivate(); this._canUnpublish = otherCollectionRights.canUnpublish(); this._ownerNames = otherCollectionRights.getOwnerNames(); } } @Injectable({ providedIn: 'root' }) export class CollectionRightsObjectFactory { // Static class methods. Note that "this" is not available in static // contexts. 
This function takes a JSON object which represents a backend // collection python dict. create( collectionRightsBackendObject: CollectionRightsBackendDict): CollectionRights { return new CollectionRights(cloneDeep(collectionRightsBackendObject)); } // Create a new, empty collection rights object. This is not guaranteed to // pass validation tests. createEmptyCollectionRights(): CollectionRights { return new CollectionRights({ owner_names: [], collection_id: null, can_edit: null, can_unpublish: null, is_private: null }); } } angular.module('oppia').factory( 'CollectionRightsObjectFactory', downgradeInjectable(CollectionRightsObjectFactory));
prasanna08/oppia
core/templates/domain/collection/CollectionRightsObjectFactory.ts
TypeScript
apache-2.0
4,943
<!DOCTYPE html > <html> <head> <title></title> <meta name="description" content="" /> <meta name="keywords" content="" /> <meta http-equiv="content-type" content="text/html; charset=UTF-8" /> <link href="../lib/ref-index.css" media="screen" type="text/css" rel="stylesheet" /> <script type="text/javascript" src="../lib/jquery.js"></script> </head> <body><div class="entry"> <div class="name">Scheduler</div> <div class="occurrences"><a href="../rx/lang/scala/package.html" class="extype" name="rx.lang.scala">scala</a> </div> </div><div class="entry"> <div class="name">SerialSubscription</div> <div class="occurrences"><a href="../rx/lang/scala/subscriptions/package.html" class="extype" name="rx.lang.scala.subscriptions">subscriptions</a> </div> </div><div class="entry"> <div class="name">SerializedSubject</div> <div class="occurrences"><a href="../rx/lang/scala/subjects/package.html" class="extype" name="rx.lang.scala.subjects">subjects</a> </div> </div><div class="entry"> <div class="name">Subject</div> <div class="occurrences"><a href="../rx/lang/scala/package.html" class="extype" name="rx.lang.scala">scala</a> </div> </div><div class="entry"> <div class="name">Subscriber</div> <div class="occurrences"><a href="../rx/lang/scala/package.html" class="extype" name="rx.lang.scala">scala</a> </div> </div><div class="entry"> <div class="name">Subscription</div> <div class="occurrences"><a href="../rx/lang/scala/package.html" class="extype" name="rx.lang.scala">scala</a> </div> </div><div class="entry"> <div class="name">sample</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">scala</div> <div class="occurrences"><a href="../rx/lang/package.html" class="extype" name="rx.lang">lang</a> </div> </div><div class="entry"> <div class="name">scalaAction1ToOnSubscribe</div> <div class="occurrences"><a 
href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaBooleanFunction1ToRxBooleanFunc1</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaBooleanFunction2ToRxBooleanFunc1</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaByNameParamToFunc0</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaFuncNToRxFuncN</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaFunction0ProducingUnitToAction0</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaFunction0ToRxFunc0</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaFunction1ProducingUnitToAction1</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" 
name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaFunction1ToRxFunc1</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaFunction2ToRxFunc2</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaFunction3ToRxFunc3</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaFunction4ToRxFunc4</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">scalaSchedulerToJavaScheduler</div> <div class="occurrences"><a href="../rx/lang/scala/JavaConversions$.html" class="extype" name="rx.lang.scala.JavaConversions">JavaConversions</a> </div> </div><div class="entry"> <div class="name">scalaWorkerToJavaWorker</div> <div class="occurrences"><a href="../rx/lang/scala/JavaConversions$.html" class="extype" name="rx.lang.scala.JavaConversions">JavaConversions</a> </div> </div><div class="entry"> <div class="name">scan</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">schedule</div> <div class="occurrences"><a href="../rx/lang/scala/Worker.html" class="extype" name="rx.lang.scala.Worker">Worker</a> </div> </div><div 
class="entry"> <div class="name">schedulePeriodically</div> <div class="occurrences"><a href="../rx/lang/scala/Worker.html" class="extype" name="rx.lang.scala.Worker">Worker</a> </div> </div><div class="entry"> <div class="name">scheduleRec</div> <div class="occurrences"><a href="../rx/lang/scala/Worker.html" class="extype" name="rx.lang.scala.Worker">Worker</a> </div> </div><div class="entry"> <div class="name">schedulerActionToFunc2</div> <div class="occurrences"><a href="../rx/lang/scala/ImplicitFunctionConversions$.html" class="extype" name="rx.lang.scala.ImplicitFunctionConversions">ImplicitFunctionConversions</a> </div> </div><div class="entry"> <div class="name">schedulers</div> <div class="occurrences"><a href="../rx/lang/scala/package.html" class="extype" name="rx.lang.scala">scala</a> </div> </div><div class="entry"> <div class="name">sequenceEqual</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">sequenceEqualWith</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">serialize</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">setProducer</div> <div class="occurrences"><a href="../rx/lang/scala/Subscriber.html" class="extype" name="rx.lang.scala.Subscriber">Subscriber</a> </div> </div><div class="entry"> <div class="name">share</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">single</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> <a 
href="../rx/lang/scala/observables/BlockingObservable.html" class="extype" name="rx.lang.scala.observables.BlockingObservable">BlockingObservable</a> </div> </div><div class="entry"> <div class="name">singleOption</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> <a href="../rx/lang/scala/observables/BlockingObservable.html" class="extype" name="rx.lang.scala.observables.BlockingObservable">BlockingObservable</a> </div> </div><div class="entry"> <div class="name">singleOrElse</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> <a href="../rx/lang/scala/observables/BlockingObservable.html" class="extype" name="rx.lang.scala.observables.BlockingObservable">BlockingObservable</a> </div> </div><div class="entry"> <div class="name">size</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">sliding</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">slidingBuffer</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">source</div> <div class="occurrences"><a href="../rx/lang/scala/package$$ObservableExtensions.html" class="extype" name="rx.lang.scala.ObservableExtensions">ObservableExtensions</a> </div> </div><div class="entry"> <div class="name">subjects</div> <div class="occurrences"><a href="../rx/lang/scala/package.html" class="extype" name="rx.lang.scala">scala</a> </div> </div><div class="entry"> <div class="name">subscribe</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" 
class="extype" name="rx.lang.scala.Observable">Observable</a> <a href="../rx/lang/scala/observables/BlockingObservable.html" class="extype" name="rx.lang.scala.observables.BlockingObservable">BlockingObservable</a> </div> </div><div class="entry"> <div class="name">subscribeOn</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">subscription</div> <div class="occurrences"><a href="../rx/lang/scala/subscriptions/MultipleAssignmentSubscription.html" class="extype" name="rx.lang.scala.subscriptions.MultipleAssignmentSubscription">MultipleAssignmentSubscription</a> <a href="../rx/lang/scala/subscriptions/SerialSubscription.html" class="extype" name="rx.lang.scala.subscriptions.SerialSubscription">SerialSubscription</a> </div> </div><div class="entry"> <div class="name">subscription_=</div> <div class="occurrences"><a href="../rx/lang/scala/subscriptions/MultipleAssignmentSubscription.html" class="extype" name="rx.lang.scala.subscriptions.MultipleAssignmentSubscription">MultipleAssignmentSubscription</a> <a href="../rx/lang/scala/subscriptions/SerialSubscription.html" class="extype" name="rx.lang.scala.subscriptions.SerialSubscription">SerialSubscription</a> </div> </div><div class="entry"> <div class="name">subscriptions</div> <div class="occurrences"><a href="../rx/lang/scala/package.html" class="extype" name="rx.lang.scala">scala</a> </div> </div><div class="entry"> <div class="name">sum</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">switch</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> <a href="../rx/lang/scala/observables/ErrorDelayingObservable.html" class="extype" 
name="rx.lang.scala.observables.ErrorDelayingObservable">ErrorDelayingObservable</a> </div> </div><div class="entry"> <div class="name">switchIfEmpty</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> </div> </div><div class="entry"> <div class="name">switchMap</div> <div class="occurrences"><a href="../rx/lang/scala/Observable.html" class="extype" name="rx.lang.scala.Observable">Observable</a> <a href="../rx/lang/scala/observables/ErrorDelayingObservable.html" class="extype" name="rx.lang.scala.observables.ErrorDelayingObservable">ErrorDelayingObservable</a> </div> </div></body> </html>
ReactiveX/reactivex.github.io
rxscala/scaladoc/index/index-s.html
HTML
apache-2.0
13,424
/** * ProductType.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.dfp.axis.v201602; public class ProductType implements java.io.Serializable { private java.lang.String _value_; private static java.util.HashMap _table_ = new java.util.HashMap(); // Constructor protected ProductType(java.lang.String value) { _value_ = value; _table_.put(_value_,this); } public static final java.lang.String _DFP = "DFP"; public static final java.lang.String _OFFLINE = "OFFLINE"; public static final java.lang.String _NON_DFP = "NON_DFP"; public static final java.lang.String _UNKNOWN = "UNKNOWN"; public static final ProductType DFP = new ProductType(_DFP); public static final ProductType OFFLINE = new ProductType(_OFFLINE); public static final ProductType NON_DFP = new ProductType(_NON_DFP); public static final ProductType UNKNOWN = new ProductType(_UNKNOWN); public java.lang.String getValue() { return _value_;} public static ProductType fromValue(java.lang.String value) throws java.lang.IllegalArgumentException { ProductType enumeration = (ProductType) _table_.get(value); if (enumeration==null) throw new java.lang.IllegalArgumentException(); return enumeration; } public static ProductType fromString(java.lang.String value) throws java.lang.IllegalArgumentException { return fromValue(value); } public boolean equals(java.lang.Object obj) {return (obj == this);} public int hashCode() { return toString().hashCode();} public java.lang.String toString() { return _value_;} public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);} public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.EnumSerializer( _javaType, _xmlType); } public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String 
mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.EnumDeserializer( _javaType, _xmlType); } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(ProductType.class); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201602", "ProductType")); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } }
gawkermedia/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201602/ProductType.java
Java
apache-2.0
2,945
namespace BlasteroidXLib.ObjLib.Entities.Equipment.Powerups { using BlasteroidXLib.ObjLib.Entities.Ships; public class ShieldPowerup : Powerup { public ShieldPowerup(int durationInMs, double multipler) : base(durationInMs, multipler) { } public override void ApplyPowerup(Ship ship) { ship.Shield *= this.Multiplier; } public override void RemovePowerup(Ship ship) { } } }
michaelleinweaver/BlasteroidX
BlasteroidXLib/ObjLib/Entities/Equipment/Powerups/ShieldPowerup.cs
C#
apache-2.0
491
package com.enioka.jqm.tools; import java.io.File; import java.io.IOException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.enioka.jqm.jdbc.DbConn; import com.enioka.jqm.model.Node; /** * A mode of deployment where a directory is scanned for job definitions. <br> * Only used in single-node Docker mode. */ class DirectoryScanner implements Runnable { private static Logger jqmlogger = LoggerFactory.getLogger(DirectoryScanner.class); private static final int PERIOD_MS = 10000; private final File baseScanDirectory; private final Node node; private Semaphore loop = new Semaphore(0); private boolean run = true; private Thread localThread = null; DirectoryScanner(String path, Node node) { baseScanDirectory = new File(path); if (baseScanDirectory.exists() && !baseScanDirectory.isDirectory()) { throw new JqmInitError("Cannot scan a file - need a directory. " + baseScanDirectory.getAbsolutePath()); } if (!baseScanDirectory.exists() && !baseScanDirectory.mkdir()) { throw new JqmInitError("Cannot create directory " + baseScanDirectory.getAbsolutePath()); } this.node = node; } void stop() { jqmlogger.info("Directory scanner has received a stop request"); if (this.run) { this.run = false; if (this.localThread != null) { this.localThread.interrupt(); } } } void forceLoop() { this.loop.release(1); } @Override public void run() { Thread.currentThread().setName("DIRECTORY_SCANNER;polling directory;"); jqmlogger.info("Start of the directory scanner"); this.localThread = Thread.currentThread(); while (true) { try { loop.tryAcquire(PERIOD_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { run = false; } if (!run || Thread.currentThread().isInterrupted()) { break; } scan(); } jqmlogger.info("End of the directory scanner"); } private void scan() { for (File subDir : baseScanDirectory.listFiles()) { if (!subDir.isDirectory() || 
!subDir.canExecute()) { jqmlogger.warn("Ignoring non-executable-directory file in deployment scanner: " + subDir.getAbsolutePath()); continue; } File xml = null; int fileCount = 0; outerloop: for (File subFile : subDir.listFiles()) { fileCount++; if (subFile.getName().endsWith(".xml")) { if (xml == null) { xml = subFile; } else { jqmlogger.warn( "There must be a single deployment descriptor per single deplopyment unit: " + subDir.getAbsolutePath()); continue outerloop; } } } if (xml == null) { if (fileCount > 0) { jqmlogger.warn("Files are present, but no deployment descriptor inside " + subDir.getAbsolutePath()); } continue; } // We have a deployment unit! Just deploy it. try { FileUtils.copyDirectory(subDir, new File(node.getRepo(), subDir.getName())); } catch (IOException e) { jqmlogger.error("Could not copy deployment unit to job definition repository", e); continue; } try { importDeploymentUnit(new File(node.getRepo(), new File(subDir.getName(), xml.getName()).getPath())); } catch (Exception e) { jqmlogger.error("Could not import deployment descriptor " + xml.getName(), e); continue; } // If here, correctly assimilated. Remove the directory content. try { FileUtils.cleanDirectory(subDir); } catch (IOException e) { jqmlogger.warn("Could not clean scanner directory " + subDir, e); continue; } jqmlogger.info("Deployment unit correctly processed: " + subDir.getName()); } } private void importDeploymentUnit(File deploymentDescriptor) { DbConn cnx = null; try { cnx = Helpers.getNewDbSession(); // Target remapped path is relative to repository root. XmlJobDefParser.parse(deploymentDescriptor.getAbsolutePath(), cnx, deploymentDescriptor.getParentFile().getName()); } finally { Helpers.closeQuietly(cnx); } } }
enioka/jqm
jqm-all/jqm-service/src/main/java/com/enioka/jqm/tools/DirectoryScanner.java
Java
apache-2.0
5,326
import gevent import socket from vnc_api.vnc_api import * from cfgm_common.vnc_kombu import VncKombuClient from config_db import * from cfgm_common.dependency_tracker import DependencyTracker from reaction_map import REACTION_MAP import svc_monitor class RabbitConnection(object): _REACTION_MAP = REACTION_MAP def __init__(self, logger, args=None): self._args = args self.logger = logger def _connect_rabbit(self): rabbit_server = self._args.rabbit_server rabbit_port = self._args.rabbit_port rabbit_user = self._args.rabbit_user rabbit_password = self._args.rabbit_password rabbit_vhost = self._args.rabbit_vhost rabbit_ha_mode = self._args.rabbit_ha_mode self._db_resync_done = gevent.event.Event() q_name = 'svc_mon.%s' % (socket.gethostname()) self._vnc_kombu = VncKombuClient(rabbit_server, rabbit_port, rabbit_user, rabbit_password, rabbit_vhost, rabbit_ha_mode, q_name, self._vnc_subscribe_callback, self.logger.log) def _vnc_subscribe_callback(self, oper_info): self._db_resync_done.wait() try: self._vnc_subscribe_actions(oper_info) except Exception: svc_monitor.cgitb_error_log(self) def _vnc_subscribe_actions(self, oper_info): msg = "Notification Message: %s" % (pformat(oper_info)) self.logger.log_debug(msg) obj_type = oper_info['type'].replace('-', '_') obj_class = DBBaseSM.get_obj_type_map().get(obj_type) if obj_class is None: return if oper_info['oper'] == 'CREATE': obj_dict = oper_info['obj_dict'] obj_id = oper_info['uuid'] obj = obj_class.locate(obj_id) dependency_tracker = DependencyTracker( DBBaseSM.get_obj_type_map(), self._REACTION_MAP) dependency_tracker.evaluate(obj_type, obj) elif oper_info['oper'] == 'UPDATE': obj_id = oper_info['uuid'] obj = obj_class.get(obj_id) old_dt = None if obj is not None: old_dt = DependencyTracker( DBBaseSM.get_obj_type_map(), self._REACTION_MAP) old_dt.evaluate(obj_type, obj) else: obj = obj_class.locate(obj_id) obj.update() dependency_tracker = DependencyTracker( DBBaseSM.get_obj_type_map(), self._REACTION_MAP) 
dependency_tracker.evaluate(obj_type, obj) if old_dt: for resource, ids in old_dt.resources.items(): if resource not in dependency_tracker.resources: dependency_tracker.resources[resource] = ids else: dependency_tracker.resources[resource] = list( set(dependency_tracker.resources[resource]) | set(ids)) elif oper_info['oper'] == 'DELETE': obj_id = oper_info['uuid'] obj = obj_class.get(obj_id) if obj is None: return dependency_tracker = DependencyTracker( DBBaseSM.get_obj_type_map(), self._REACTION_MAP) dependency_tracker.evaluate(obj_type, obj) obj_class.delete(obj_id) else: # unknown operation self.logger.log_error('Unknown operation %s' % oper_info['oper']) return if obj is None: self.logger.log_error('Error while accessing %s uuid %s' % ( obj_type, obj_id)) return for res_type, res_id_list in dependency_tracker.resources.items(): if not res_id_list: continue cls = DBBaseSM.get_obj_type_map().get(res_type) if cls is None: continue for res_id in res_id_list: res_obj = cls.get(res_id) if res_obj is not None: res_obj.evaluate()
sajuptpm/contrail-controller
src/config/svc-monitor/svc_monitor/rabbit.py
Python
apache-2.0
4,079
# Copyright 2014 - Mirantis, Inc. # Copyright 2020 Nokia Software. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_config import cfg from mistral.db.v2 import api as db_api from mistral.lang import parser as spec_parser from mistral.services import workbooks as wb_service from mistral.tests.unit import base # Use the set_default method to set value otherwise in certain test cases # the change in value is not permanent. cfg.CONF.set_default('auth_enable', False, group='pecan') WORKBOOK = """ --- version: '2.0' name: my_wb tags: [test] actions: concat: base: std.echo base-input: output: "{$.str1}{$.str2}" workflows: wf1: #Sample Comment 1 type: reverse tags: [wf_test] input: - param1 output: result: "{$.result}" tasks: task1: action: std.echo output="{$.param1}" publish: result: "{$}" wf2: type: direct output: result: "{$.result}" tasks: task1: workflow: my_wb.wf1 param1='Hi' task_name='task1' publish: result: "The result of subworkflow is '{$.final_result}'" """ WORKBOOK_WF1_DEFINITION = """wf1: #Sample Comment 1 type: reverse tags: [wf_test] input: - param1 output: result: "{$.result}" tasks: task1: action: std.echo output="{$.param1}" publish: result: "{$}" """ WORKBOOK_WF2_DEFINITION = """wf2: type: direct output: result: "{$.result}" tasks: task1: workflow: my_wb.wf1 param1='Hi' task_name='task1' publish: result: "The result of subworkflow is '{$.final_result}'" """ UPDATED_WORKBOOK = """ --- version: '2.0' name: my_wb tags: [test] actions: concat: base: std.echo 
base-input: output: "{$.str1}{$.str2}" workflows: wf1: type: direct output: result: "{$.result}" tasks: task1: workflow: my_wb.wf2 param1='Hi' task_name='task1' publish: result: "The result of subworkflow is '{$.final_result}'" wf2: type: reverse input: - param1 output: result: "{$.result}" tasks: task1: action: std.echo output="{$.param1}" publish: result: "{$}" """ UPDATED_WORKBOOK_WF1_DEFINITION = """wf1: type: direct output: result: "{$.result}" tasks: task1: workflow: my_wb.wf2 param1='Hi' task_name='task1' publish: result: "The result of subworkflow is '{$.final_result}'" """ UPDATED_WORKBOOK_WF2_DEFINITION = """wf2: type: reverse input: - param1 output: result: "{$.result}" tasks: task1: action: std.echo output="{$.param1}" publish: result: "{$}" """ ACTION_DEFINITION = """concat: base: std.echo base-input: output: "{$.str1}{$.str2}" """ class WorkbookServiceTest(base.DbTestCase): def test_create_workbook(self): namespace = 'test_workbook_service_0123_namespace' wb_db = wb_service.create_workbook_v2(WORKBOOK, namespace=namespace) self.assertIsNotNone(wb_db) self.assertEqual('my_wb', wb_db.name) self.assertEqual(namespace, wb_db.namespace) self.assertEqual(WORKBOOK, wb_db.definition) self.assertIsNotNone(wb_db.spec) self.assertListEqual(['test'], wb_db.tags) db_actions = db_api.get_action_definitions( name='my_wb.concat', namespace=namespace ) self.assertEqual(1, len(db_actions)) # Action. action_db = self._assert_single_item(db_actions, name='my_wb.concat') self.assertFalse(action_db.is_system) action_spec = spec_parser.get_action_spec(action_db.spec) self.assertEqual('concat', action_spec.get_name()) self.assertEqual('std.echo', action_spec.get_base()) self.assertEqual(ACTION_DEFINITION, action_db.definition) db_wfs = db_api.get_workflow_definitions() self.assertEqual(2, len(db_wfs)) # Workflow 1. 
wf1_db = self._assert_single_item(db_wfs, name='my_wb.wf1') wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec) self.assertEqual('wf1', wf1_spec.get_name()) self.assertEqual('reverse', wf1_spec.get_type()) self.assertListEqual(['wf_test'], wf1_spec.get_tags()) self.assertListEqual(['wf_test'], wf1_db.tags) self.assertEqual(namespace, wf1_db.namespace) self.assertEqual(WORKBOOK_WF1_DEFINITION, wf1_db.definition) # Workflow 2. wf2_db = self._assert_single_item(db_wfs, name='my_wb.wf2') wf2_spec = spec_parser.get_workflow_spec(wf2_db.spec) self.assertEqual('wf2', wf2_spec.get_name()) self.assertEqual('direct', wf2_spec.get_type()) self.assertEqual(namespace, wf2_db.namespace) self.assertEqual(WORKBOOK_WF2_DEFINITION, wf2_db.definition) def test_create_same_workbook_in_different_namespaces(self): first_namespace = 'first_namespace' second_namespace = 'second_namespace' first_wb = wb_service.create_workbook_v2(WORKBOOK, namespace=first_namespace) self.assertIsNotNone(first_wb) self.assertEqual('my_wb', first_wb.name) self.assertEqual(first_namespace, first_wb.namespace) second_wb = wb_service.create_workbook_v2(WORKBOOK, namespace=second_namespace) self.assertIsNotNone(second_wb) self.assertEqual('my_wb', second_wb.name) self.assertEqual(second_namespace, second_wb.namespace) def test_create_workbook_with_default_namespace(self): wb_db = wb_service.create_workbook_v2(WORKBOOK) self.assertIsNotNone(wb_db) self.assertEqual('my_wb', wb_db.name) self.assertEqual('', wb_db.namespace) db_api.delete_workbook('my_wb') def test_update_workbook(self): namespace = 'test_workbook_service_0123_namespace' # Create workbook. wb_db = wb_service.create_workbook_v2(WORKBOOK, namespace=namespace) self.assertIsNotNone(wb_db) self.assertEqual(2, len(db_api.get_workflow_definitions())) # Update workbook. 
wb_db = wb_service.update_workbook_v2( UPDATED_WORKBOOK, namespace=namespace ) self.assertIsNotNone(wb_db) self.assertEqual('my_wb', wb_db.name) self.assertEqual(namespace, wb_db.namespace) self.assertEqual(UPDATED_WORKBOOK, wb_db.definition) self.assertListEqual(['test'], wb_db.tags) db_wfs = db_api.get_workflow_definitions() self.assertEqual(2, len(db_wfs)) # Workflow 1. wf1_db = self._assert_single_item(db_wfs, name='my_wb.wf1') wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec) self.assertEqual('wf1', wf1_spec.get_name()) self.assertEqual('direct', wf1_spec.get_type()) self.assertEqual(namespace, wf1_db.namespace) self.assertEqual(UPDATED_WORKBOOK_WF1_DEFINITION, wf1_db.definition) # Workflow 2. wf2_db = self._assert_single_item(db_wfs, name='my_wb.wf2') wf2_spec = spec_parser.get_workflow_spec(wf2_db.spec) self.assertEqual('wf2', wf2_spec.get_name()) self.assertEqual('reverse', wf2_spec.get_type()) self.assertEqual(namespace, wf2_db.namespace) self.assertEqual(UPDATED_WORKBOOK_WF2_DEFINITION, wf2_db.definition) def test_delete_workbook(self): namespace = 'pqr' # Create workbook. wb_service.create_workbook_v2(WORKBOOK, namespace=namespace) db_wfs = db_api.get_workflow_definitions() db_actions = db_api.get_action_definitions(name='my_wb.concat', namespace=namespace) self.assertEqual(2, len(db_wfs)) self.assertEqual(1, len(db_actions)) db_api.delete_workbook('my_wb', namespace=namespace) db_wfs = db_api.get_workflow_definitions() db_actions = db_api.get_action_definitions(name='my_wb.concat', namespace=namespace) # Deleting workbook shouldn't delete workflows and actions self.assertEqual(2, len(db_wfs)) self.assertEqual(1, len(db_actions))
openstack/mistral
mistral/tests/unit/services/test_workbook_service.py
Python
apache-2.0
8,847
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.compaction; import java.io.DataOutputStream; import java.io.FileOutputStream; import java.io.IOException; import java.util.*; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.RowPosition; import org.apache.cassandra.dht.Bounds; import org.apache.cassandra.dht.Token; import org.apache.cassandra.io.sstable.*; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.Pair; public class LeveledManifest { private static final Logger logger = LoggerFactory.getLogger(LeveledManifest.class); /** * limit the number of L0 sstables we do at once, because compaction bloom filter creation * uses a pessimistic estimate of how many keys overlap (none), so we risk 
wasting memory * or even OOMing when compacting highly overlapping sstables */ private static final int MAX_COMPACTING_L0 = 32; private final ColumnFamilyStore cfs; private final List<SSTableReader>[] generations; private final RowPosition[] lastCompactedKeys; private final int maxSSTableSizeInBytes; private final SizeTieredCompactionStrategyOptions options; private LeveledManifest(ColumnFamilyStore cfs, int maxSSTableSizeInMB, SizeTieredCompactionStrategyOptions options) { this.cfs = cfs; this.maxSSTableSizeInBytes = maxSSTableSizeInMB * 1024 * 1024; this.options = options; // allocate enough generations for a PB of data, with a 1-MB sstable size. (Note that if maxSSTableSize is // updated, we will still have sstables of the older, potentially smaller size. So don't make this // dependent on maxSSTableSize.) int n = (int) Math.log10(1000 * 1000 * 1000); generations = new List[n]; lastCompactedKeys = new RowPosition[n]; for (int i = 0; i < generations.length; i++) { generations[i] = new ArrayList<SSTableReader>(); lastCompactedKeys[i] = cfs.partitioner.getMinimumToken().minKeyBound(); } } public static LeveledManifest create(ColumnFamilyStore cfs, int maxSSTableSize, List<SSTableReader> sstables) { return create(cfs, maxSSTableSize, sstables, new SizeTieredCompactionStrategyOptions()); } public static LeveledManifest create(ColumnFamilyStore cfs, int maxSSTableSize, Iterable<SSTableReader> sstables, SizeTieredCompactionStrategyOptions options) { LeveledManifest manifest = new LeveledManifest(cfs, maxSSTableSize, options); // ensure all SSTables are in the manifest for (SSTableReader ssTableReader : sstables) { manifest.add(ssTableReader); } for (int i = 1; i < manifest.getAllLevelSize().length; i++) { manifest.repairOverlappingSSTables(i); } return manifest; } public synchronized void add(SSTableReader reader) { int level = reader.getSSTableLevel(); assert level < generations.length : "Invalid level " + level + " out of " + (generations.length - 1); 
logDistribution(); logger.debug("Adding {} to L{}", reader, level); generations[level].add(reader); } /** * if the number of SSTables in the current compacted set *by itself* exceeds the target level's * (regardless of the level's current contents), find an empty level instead */ private int skipLevels(int newLevel, Iterable<SSTableReader> added) { // Note that we now check if the sstables included in the compaction, *before* the compaction, fit in the next level. // This is needed since we need to decide before the actual compaction what level they will be in. // This should be safe, we might skip levels where the compacted data could have fit but that should be ok. while (maxBytesForLevel(newLevel) < SSTableReader.getTotalBytes(added) && generations[(newLevel + 1)].isEmpty()) { newLevel++; } return newLevel; } public synchronized void replace(Collection<SSTableReader> removed, Collection<SSTableReader> added) { assert !removed.isEmpty(); // use add() instead of promote when adding new sstables logDistribution(); if (logger.isDebugEnabled()) logger.debug("Replacing [{}]", toString(removed)); // the level for the added sstables is the max of the removed ones, // plus one if the removed were all on the same level for (SSTableReader sstable : removed) remove(sstable); // it's valid to do a remove w/o an add (e.g. 
on truncate) if (added.isEmpty()) return; if (logger.isDebugEnabled()) logger.debug("Adding [{}]", toString(added)); int minLevel = Integer.MAX_VALUE; for (SSTableReader ssTableReader : added) { minLevel = Math.min(minLevel, ssTableReader.getSSTableLevel()); add(ssTableReader); } lastCompactedKeys[minLevel] = SSTable.sstableOrdering.max(added).last; } public synchronized void repairOverlappingSSTables(int level) { SSTableReader previous = null; Collections.sort(generations[level], SSTable.sstableComparator); List<SSTableReader> outOfOrderSSTables = new ArrayList<SSTableReader>(); for (SSTableReader current : generations[level]) { if (previous != null && current.first.compareTo(previous.last) <= 0) { logger.warn(String.format("At level %d, %s [%s, %s] overlaps %s [%s, %s]. This could be caused by a bug in Cassandra 1.1.0 .. 1.1.3 or due to the fact that you have dropped sstables from another node into the data directory. " + "Sending back to L0. If you didn't drop in sstables, and have not yet run scrub, you should do so since you may also have rows out-of-order within an sstable", level, previous, previous.first, previous.last, current, current.first, current.last)); outOfOrderSSTables.add(current); } else { previous = current; } } if (!outOfOrderSSTables.isEmpty()) { for (SSTableReader sstable : outOfOrderSSTables) sendBackToL0(sstable); } } private synchronized void sendBackToL0(SSTableReader sstable) { remove(sstable); String metaDataFile = sstable.descriptor.filenameFor(Component.STATS); try { mutateLevel(Pair.create(sstable.getSSTableMetadata(), sstable.getAncestors()), sstable.descriptor, metaDataFile, 0); sstable.reloadSSTableMetadata(); add(sstable); } catch (IOException e) { throw new RuntimeException("Could not reload sstable meta data", e); } } private String toString(Collection<SSTableReader> sstables) { StringBuilder builder = new StringBuilder(); for (SSTableReader sstable : sstables) { builder.append(sstable.descriptor.cfname) .append('-') 
.append(sstable.descriptor.generation) .append("(L") .append(sstable.getSSTableLevel()) .append("), "); } return builder.toString(); } @VisibleForTesting long maxBytesForLevel(int level) { if (level == 0) return 4L * maxSSTableSizeInBytes; double bytes = Math.pow(10, level) * maxSSTableSizeInBytes; if (bytes > Long.MAX_VALUE) throw new RuntimeException("At most " + Long.MAX_VALUE + " bytes may be in a compaction level; your maxSSTableSize must be absurdly high to compute " + bytes); return (long) bytes; } /** * @return highest-priority sstables to compact, and level to compact them to * If no compactions are necessary, will return null */ public synchronized Pair<? extends Collection<SSTableReader>, Integer> getCompactionCandidates() { // LevelDB gives each level a score of how much data it contains vs its ideal amount, and // compacts the level with the highest score. But this falls apart spectacularly once you // get behind. Consider this set of levels: // L0: 988 [ideal: 4] // L1: 117 [ideal: 10] // L2: 12 [ideal: 100] // // The problem is that L0 has a much higher score (almost 250) than L1 (11), so what we'll // do is compact a batch of MAX_COMPACTING_L0 sstables with all 117 L1 sstables, and put the // result (say, 120 sstables) in L1. Then we'll compact the next batch of MAX_COMPACTING_L0, // and so forth. So we spend most of our i/o rewriting the L1 data with each batch. // // If we could just do *all* L0 a single time with L1, that would be ideal. But we can't // -- see the javadoc for MAX_COMPACTING_L0. // // LevelDB's way around this is to simply block writes if L0 compaction falls behind. // We don't have that luxury. // // So instead, we // 1) force compacting higher levels first, which minimizes the i/o needed to compact // optimially which gives us a long term win, and // 2) if L0 falls behind, we will size-tiered compact it to reduce read overhead until // we can catch up on the higher levels. 
// // This isn't a magic wand -- if you are consistently writing too fast for LCS to keep // up, you're still screwed. But if instead you have intermittent bursts of activity, // it can help a lot. for (int i = generations.length - 1; i > 0; i--) { List<SSTableReader> sstables = generations[i]; if (sstables.isEmpty()) continue; // mostly this just avoids polluting the debug log with zero scores // we want to calculate score excluding compacting ones Set<SSTableReader> sstablesInLevel = Sets.newHashSet(sstables); Set<SSTableReader> remaining = Sets.difference(sstablesInLevel, cfs.getDataTracker().getCompacting()); double score = (double)SSTableReader.getTotalBytes(remaining) / (double)maxBytesForLevel(i); logger.debug("Compaction score for level {} is {}", i, score); if (score > 1.001) { // before proceeding with a higher level, let's see if L0 is far enough behind to warrant STCS if (generations[0].size() > MAX_COMPACTING_L0) { Iterable<SSTableReader> candidates = cfs.getDataTracker().getUncompactingSSTables(generations[0]); List<Pair<SSTableReader,Long>> pairs = SizeTieredCompactionStrategy.createSSTableAndLengthPairs(AbstractCompactionStrategy.filterSuspectSSTables(candidates)); List<List<SSTableReader>> buckets = SizeTieredCompactionStrategy.getBuckets(pairs, options.bucketHigh, options.bucketLow, options.minSSTableSize); List<SSTableReader> mostInteresting = SizeTieredCompactionStrategy.mostInterestingBucket(buckets, 4, 32); if (!mostInteresting.isEmpty()) return Pair.create(mostInteresting, 0); } // L0 is fine, proceed with this level Collection<SSTableReader> candidates = getCandidatesFor(i); if (logger.isDebugEnabled()) logger.debug("Compaction candidates for L{} are {}", i, toString(candidates)); if (!candidates.isEmpty()) return Pair.create(candidates, getNextLevel(candidates)); } } // Higher levels are happy, time for a standard, non-STCS L0 compaction if (generations[0].isEmpty()) return null; Collection<SSTableReader> candidates = getCandidatesFor(0); 
if (candidates.isEmpty()) return null; return Pair.create(candidates, getNextLevel(candidates)); } public synchronized int getLevelSize(int i) { if (i >= generations.length) throw new ArrayIndexOutOfBoundsException("Maximum valid generation is " + (generations.length - 1)); return generations[i].size(); } public synchronized int[] getAllLevelSize() { int[] counts = new int[generations.length]; for (int i = 0; i < counts.length; i++) counts[i] = generations[i].size(); return counts; } private void logDistribution() { if (logger.isDebugEnabled()) { for (int i = 0; i < generations.length; i++) { if (!generations[i].isEmpty()) { logger.debug("L{} contains {} SSTables ({} bytes) in {}", i, generations[i].size(), SSTableReader.getTotalBytes(generations[i]), this); } } } } @VisibleForTesting public int remove(SSTableReader reader) { int level = reader.getSSTableLevel(); assert level >= 0 : reader + " not present in manifest: "+level; generations[level].remove(reader); return level; } private static Set<SSTableReader> overlapping(Collection<SSTableReader> candidates, Iterable<SSTableReader> others) { assert !candidates.isEmpty(); /* * Picking each sstable from others that overlap one of the sstable of candidates is not enough * because you could have the following situation: * candidates = [ s1(a, c), s2(m, z) ] * others = [ s3(e, g) ] * In that case, s2 overlaps none of s1 or s2, but if we compact s1 with s2, the resulting sstable will * overlap s3, so we must return s3. * * Thus, the correct approach is to pick sstables overlapping anything between the first key in all * the candidate sstables, and the last. */ Iterator<SSTableReader> iter = candidates.iterator(); SSTableReader sstable = iter.next(); Token first = sstable.first.token; Token last = sstable.last.token; while (iter.hasNext()) { sstable = iter.next(); first = first.compareTo(sstable.first.token) <= 0 ? first : sstable.first.token; last = last.compareTo(sstable.last.token) >= 0 ? 
last : sstable.last.token; } return overlapping(first, last, others); } @VisibleForTesting static Set<SSTableReader> overlapping(SSTableReader sstable, Iterable<SSTableReader> others) { return overlapping(sstable.first.token, sstable.last.token, others); } /** * @return sstables from @param sstables that contain keys between @param start and @param end, inclusive. */ private static Set<SSTableReader> overlapping(Token start, Token end, Iterable<SSTableReader> sstables) { assert start.compareTo(end) <= 0; Set<SSTableReader> overlapped = new HashSet<SSTableReader>(); Bounds<Token> promotedBounds = new Bounds<Token>(start, end); for (SSTableReader candidate : sstables) { Bounds<Token> candidateBounds = new Bounds<Token>(candidate.first.token, candidate.last.token); if (candidateBounds.intersects(promotedBounds)) overlapped.add(candidate); } return overlapped; } private static final Predicate<SSTableReader> suspectP = new Predicate<SSTableReader>() { public boolean apply(SSTableReader candidate) { return candidate.isMarkedSuspect(); } }; /** * @return highest-priority sstables to compact for the given level. * If no compactions are possible (because of concurrent compactions or because some sstables are blacklisted * for prior failure), will return an empty list. Never returns null. */ private Collection<SSTableReader> getCandidatesFor(int level) { assert !generations[level].isEmpty(); logger.debug("Choosing candidates for L{}", level); final Set<SSTableReader> compacting = cfs.getDataTracker().getCompacting(); if (level == 0) { Set<SSTableReader> compactingL0 = ImmutableSet.copyOf(Iterables.filter(generations[0], Predicates.in(compacting))); // L0 is the dumping ground for new sstables which thus may overlap each other. // // We treat L0 compactions specially: // 1a. add sstables to the candidate set until we have at least maxSSTableSizeInMB // 1b. prefer choosing older sstables as candidates, to newer ones // 1c. 
any L0 sstables that overlap a candidate, will also become candidates // 2. At most MAX_COMPACTING_L0 sstables from L0 will be compacted at once // 3. If total candidate size is less than maxSSTableSizeInMB, we won't bother compacting with L1, // and the result of the compaction will stay in L0 instead of being promoted (see promote()) // // Note that we ignore suspect-ness of L1 sstables here, since if an L1 sstable is suspect we're // basically screwed, since we expect all or most L0 sstables to overlap with each L1 sstable. // So if an L1 sstable is suspect we can't do much besides try anyway and hope for the best. Set<SSTableReader> candidates = new HashSet<SSTableReader>(); Set<SSTableReader> remaining = new HashSet<SSTableReader>(); Iterables.addAll(remaining, Iterables.filter(generations[0], Predicates.not(suspectP))); for (SSTableReader sstable : ageSortedSSTables(remaining)) { if (candidates.contains(sstable)) continue; Sets.SetView<SSTableReader> overlappedL0 = Sets.union(Collections.singleton(sstable), overlapping(sstable, remaining)); if (!Sets.intersection(overlappedL0, compactingL0).isEmpty()) continue; for (SSTableReader newCandidate : overlappedL0) { candidates.add(newCandidate); remaining.remove(newCandidate); } if (candidates.size() > MAX_COMPACTING_L0) { // limit to only the MAX_COMPACTING_L0 oldest candidates candidates = new HashSet<SSTableReader>(ageSortedSSTables(candidates).subList(0, MAX_COMPACTING_L0)); break; } } // leave everything in L0 if we didn't end up with a full sstable's worth of data if (SSTable.getTotalBytes(candidates) > maxSSTableSizeInBytes) { // add sstables from L1 that overlap candidates // if the overlapping ones are already busy in a compaction, leave it out. 
// TODO try to find a set of L0 sstables that only overlaps with non-busy L1 sstables candidates = Sets.union(candidates, overlapping(candidates, generations[1])); } if (candidates.size() < 2) return Collections.emptyList(); else return candidates; } // for non-L0 compactions, pick up where we left off last time Collections.sort(generations[level], SSTable.sstableComparator); int start = 0; // handles case where the prior compaction touched the very last range for (int i = 0; i < generations[level].size(); i++) { SSTableReader sstable = generations[level].get(i); if (sstable.first.compareTo(lastCompactedKeys[level]) > 0) { start = i; break; } } // look for a non-suspect keyspace to compact with, starting with where we left off last time, // and wrapping back to the beginning of the generation if necessary for (int i = 0; i < generations[level].size(); i++) { SSTableReader sstable = generations[level].get((start + i) % generations[level].size()); Set<SSTableReader> candidates = Sets.union(Collections.singleton(sstable), overlapping(sstable, generations[level + 1])); if (Iterables.any(candidates, suspectP)) continue; if (Sets.intersection(candidates, compacting).isEmpty()) return candidates; } // all the sstables were suspect or overlapped with something suspect return Collections.emptyList(); } private List<SSTableReader> ageSortedSSTables(Collection<SSTableReader> candidates) { List<SSTableReader> ageSortedCandidates = new ArrayList<SSTableReader>(candidates); Collections.sort(ageSortedCandidates, SSTable.maxTimestampComparator); return ageSortedCandidates; } @Override public String toString() { return "Manifest@" + hashCode(); } public int getLevelCount() { for (int i = generations.length - 1; i >= 0; i--) { if (generations[i].size() > 0) return i; } return 0; } public synchronized SortedSet<SSTableReader> getLevelSorted(int level, Comparator<SSTableReader> comparator) { return ImmutableSortedSet.copyOf(comparator, generations[level]); } public List<SSTableReader> 
getLevel(int i) { return generations[i]; } public synchronized int getEstimatedTasks() { long tasks = 0; long[] estimated = new long[generations.length]; for (int i = generations.length - 1; i >= 0; i--) { List<SSTableReader> sstables = generations[i]; estimated[i] = Math.max(0L, SSTableReader.getTotalBytes(sstables) - maxBytesForLevel(i)) / maxSSTableSizeInBytes; tasks += estimated[i]; } logger.debug("Estimating {} compactions to do for {}.{}", Arrays.toString(estimated), cfs.keyspace.getName(), cfs.name); return Ints.checkedCast(tasks); } public int getNextLevel(Collection<SSTableReader> sstables) { int maximumLevel = Integer.MIN_VALUE; int minimumLevel = Integer.MAX_VALUE; for (SSTableReader sstable : sstables) { maximumLevel = Math.max(sstable.getSSTableLevel(), maximumLevel); minimumLevel = Math.min(sstable.getSSTableLevel(), minimumLevel); } int newLevel; if (minimumLevel == 0 && minimumLevel == maximumLevel && SSTable.getTotalBytes(sstables) < maxSSTableSizeInBytes) { newLevel = 0; } else { newLevel = minimumLevel == maximumLevel ? 
maximumLevel + 1 : maximumLevel; newLevel = skipLevels(newLevel, sstables); assert newLevel > 0; } return newLevel; } /** * Scary method mutating existing sstable component * * Tries to do it safely by moving the new file on top of the old one * * Caller needs to reload the sstable metadata (sstableReader.reloadSSTableMetadata()) * * @see org.apache.cassandra.io.sstable.SSTableReader#reloadSSTableMetadata() * * @param oldMetadata * @param descriptor * @param filename * @param level * @throws IOException */ public static synchronized void mutateLevel(Pair<SSTableMetadata, Set<Integer>> oldMetadata, Descriptor descriptor, String filename, int level) throws IOException { logger.debug("Mutating {} to level {}", descriptor.filenameFor(Component.STATS), level); SSTableMetadata metadata = SSTableMetadata.copyWithNewSSTableLevel(oldMetadata.left, level); DataOutputStream out = new DataOutputStream(new FileOutputStream(filename + "-tmp")); SSTableMetadata.serializer.legacySerialize(metadata, oldMetadata.right, descriptor, out); out.flush(); out.close(); // we cant move a file on top of another file in windows: if (!FBUtilities.isUnix()) FileUtils.delete(filename); FileUtils.renameWithConfirm(filename + "-tmp", filename); } }
sbtourist/cassandra
src/java/org/apache/cassandra/db/compaction/LeveledManifest.java
Java
apache-2.0
26,099
import Check from "../../Core/Check.js";
import Cesium3DTilesInspector from "../Cesium3DTilesInspector/Cesium3DTilesInspector.js";

/**
 * A mixin which adds the {@link Cesium3DTilesInspector} widget to the {@link Viewer} widget.
 * Rather than being called directly, this function is normally passed as
 * a parameter to {@link Viewer#extend}, as shown in the example below.
 * @exports viewerCesium3DTilesInspectorMixin
 *
 * @param {Viewer} viewer The viewer instance.
 *
 * @example
 * var viewer = new Cesium.Viewer('cesiumContainer');
 * viewer.extend(Cesium.viewerCesium3DTilesInspectorMixin);
 */
function viewerCesium3DTilesInspectorMixin(viewer) {
  //>>includeStart('debug', pragmas.debug);
  Check.typeOf.object("viewer", viewer);
  //>>includeEnd('debug');

  // Host element for the inspector widget, attached to the viewer's container.
  var inspectorContainer = document.createElement("div");
  inspectorContainer.className =
    "cesium-viewer-cesium3DTilesInspectorContainer";
  viewer.container.appendChild(inspectorContainer);

  var inspector = new Cesium3DTilesInspector(inspectorContainer, viewer.scene);

  // Expose the widget as a read-only `cesium3DTilesInspector` property on
  // the viewer instance.
  Object.defineProperties(viewer, {
    cesium3DTilesInspector: {
      get: function () {
        return inspector;
      },
    },
  });
}
export default viewerCesium3DTilesInspectorMixin;
YonatanKra/cesium
Source/Widgets/Viewer/viewerCesium3DTilesInspectorMixin.js
JavaScript
apache-2.0
1,237
# Copyright (c) 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from neutron.plugins.ml2.drivers.l2pop import rpc as l2pop_rpc
from neutron.plugins.ml2 import managers
from neutron.plugins.ml2 import rpc as rpc
from neutron_lib.agent import topics


class Tunnel_Calls(object):
    """Common tunnel calls for L2 agent.

    Wraps the ML2 RPC machinery so that callers can trigger tunnel and
    L2-population notifications toward the neutron L2 agent.
    """

    def __init__(self):
        self._construct_rpc_stuff()

    def _construct_rpc_stuff(self):
        """Build the agent notifier, type manager and RPC callback objects."""
        self.notifier = rpc.AgentNotifierApi(topics.AGENT)
        self.type_manager = managers.TypeManager()
        self.tunnel_rpc_obj = rpc.RpcCallbacks(self.notifier,
                                               self.type_manager)

    def trigger_tunnel_sync(self, context, tunnel_ip):
        """Sends tunnel sync RPC message to the neutron L2 agent."""
        # Tunnel type is fixed to 'vxlan' for this service.
        self.tunnel_rpc_obj.tunnel_sync(context,
                                        tunnel_ip=tunnel_ip,
                                        tunnel_type='vxlan')

    def trigger_l2pop_sync(self, context, other_fdb_entries):
        """Sends L2pop ADD RPC message to the neutron L2 agent."""
        notify_api = l2pop_rpc.L2populationAgentNotifyAPI()
        notify_api.add_fdb_entries(context, other_fdb_entries)

    def trigger_l2pop_delete(self, context, other_fdb_entries, host=None):
        """Sends L2pop DELETE RPC message to the neutron L2 agent."""
        notify_api = l2pop_rpc.L2populationAgentNotifyAPI()
        notify_api.remove_fdb_entries(context, other_fdb_entries, host)
openstack/networking-l2gw
networking_l2gw/services/l2gateway/common/tunnel_calls.py
Python
apache-2.0
2,017
<?php
// Log a user in: validate POSTed credentials ('uname', 'pass') against the
// `logins` table, then redirect to home.phtml on success or back to
// login.phtml on failure.
require 'dbms_basic.php';

session_start();

$li = mysql_connect($dbhost, $dbuser, $dbpass);
if (!$li) {
    die('Could not connect: ' . mysql_error());
}

$conc = mysql_select_db($dbdata, $li);
if (!$conc) {
    die('Could not connect to db: ' . mysql_error());
}

// Missing POST fields are treated as a failed login instead of raising
// undefined-index notices.
$uname = isset($_POST['uname']) ? $_POST['uname'] : '';
$psswd = isset($_POST['pass']) ? $_POST['pass'] : '';

// Escape user input before interpolating it into SQL to prevent SQL
// injection (the original query interpolated raw POST data).
// NOTE(review): passwords look like they are compared in plain text here;
// they should be hashed (password_hash/password_verify) -- confirm schema.
$uname_esc = mysql_real_escape_string($uname, $li);
$psswd_esc = mysql_real_escape_string($psswd, $li);

$sql = "SELECT uid FROM logins WHERE uname='$uname_esc' and upass='$psswd_esc'";
$result = mysql_query($sql, $li);

// Check the row count first: mysql_result() on an empty result set emits a
// warning. Also avoids rejecting a legitimate uid of 0 (the original
// `if(!$res)` check would have).
if (!$result || mysql_num_rows($result) == 0) {
    mysql_close($li);
    header("location:login.phtml");
    exit; // stop execution after the redirect header
}

$res = mysql_result($result, 0);
$_SESSION["name"] = $uname;
$_SESSION["id"] = $res;
mysql_close($li);
header("location:home.phtml");
exit;
?>
sara-02/IWH15
pages/login.php
PHP
apache-2.0
620
// @ts-nocheck

'use strict';

const _ = require('lodash');
const isStandardSyntaxAtRule = require('../../utils/isStandardSyntaxAtRule');
const report = require('../../utils/report');
const ruleMessages = require('../../utils/ruleMessages');
const validateOptions = require('../../utils/validateOptions');
const vendor = require('../../utils/vendor');

const ruleName = 'at-rule-whitelist';

const messages = ruleMessages(ruleName, {
	rejected: (name) => `Unexpected at-rule "${name}"`,
});

// Deprecated rule: warns on any at-rule whose (unprefixed, lowercased)
// name is not in the configured list.
function rule(listInput) {
	// Accept a bare string as well as an array of strings.
	const allowedNames = [].concat(listInput);

	return (root, result) => {
		const validOptions = validateOptions(result, ruleName, {
			actual: allowedNames,
			possible: [_.isString],
		});

		if (!validOptions) {
			return;
		}

		// This rule was renamed; emit a deprecation warning on every run.
		result.warn(
			`'${ruleName}' has been deprecated. Instead use 'at-rule-allowed-list'.`,
			{
				stylelintType: 'deprecation',
				stylelintReference: `https://github.com/stylelint/stylelint/blob/13.7.0/lib/rules/${ruleName}/README.md`,
			},
		);

		root.walkAtRules((atRule) => {
			if (!isStandardSyntaxAtRule(atRule)) {
				return;
			}

			const name = atRule.name;

			// Vendor prefixes are ignored when matching against the list.
			if (allowedNames.includes(vendor.unprefixed(name).toLowerCase())) {
				return;
			}

			report({
				message: messages.rejected(name),
				node: atRule,
				result,
				ruleName,
			});
		});
	};
}

rule.primaryOptionArray = true;

rule.ruleName = ruleName;
rule.messages = messages;
rule.meta = { deprecated: true };

module.exports = rule;
kyleterry/sufr
pkg/ui/node_modules/stylelint/lib/rules/at-rule-whitelist/index.js
JavaScript
apache-2.0
1,508
/* * slip.c This module implements the SLIP protocol for kernel-based * devices like TTY. It interfaces between a raw TTY, and the * kernel's INET protocol layers. * * Version: @(#)slip.c 0.8.3 12/24/94 * * Authors: Laurence Culhane, <loz@holmes.demon.co.uk> * Fred N. van Kempen, <waltje@uwalt.nl.mugnet.org> * * Fixes: * Alan Cox : Sanity checks and avoid tx overruns. * Has a new sl->mtu field. * Alan Cox : Found cause of overrun. ifconfig sl0 mtu upwards. * Driver now spots this and grows/shrinks its buffers(hack!). * Memory leak if you run out of memory setting up a slip driver fixed. * Matt Dillon : Printable slip (borrowed from NET2E) * Pauline Middelink : Slip driver fixes. * Alan Cox : Honours the old SL_COMPRESSED flag * Alan Cox : KISS AX.25 and AXUI IP support * Michael Riepe : Automatic CSLIP recognition added * Charles Hedrick : CSLIP header length problem fix. * Alan Cox : Corrected non-IP cases of the above. * Alan Cox : Now uses hardware type as per FvK. * Alan Cox : Default to 192.168.0.0 (RFC 1597) * A.N.Kuznetsov : dev_tint() recursion fix. * Dmitry Gorodchanin : SLIP memory leaks * Dmitry Gorodchanin : Code cleanup. Reduce tty driver * buffering from 4096 to 256 bytes. * Improving SLIP response time. * CONFIG_SLIP_MODE_SLIP6. * ifconfig sl? up & down now works correctly. * Modularization. * Alan Cox : Oops - fix AX.25 buffer lengths * Dmitry Gorodchanin : Even more cleanups. Preserve CSLIP * statistics. Include CSLIP code only * if it really needed. * Alan Cox : Free slhc buffers in the right place. * Alan Cox : Allow for digipeated IP over AX.25 * Matti Aarnio : Dynamic SLIP devices, with ideas taken * from Jim Freeman's <jfree@caldera.com> * dynamic PPP devices. We do NOT kfree() * device entries, just reg./unreg. them * as they are needed. We kfree() them * at module cleanup. * With MODULE-loading ``insmod'', user can * issue parameter: slip_maxdev=1024 * (Or how much he/she wants.. 
Default is 256) * * Stanislav Voronyi : Slip line checking, with ideas taken * from multislip BSDI driver which was written * by Igor Chechik, RELCOM Corp. Only algorithms * have been ported to Linux SLIP driver. * Vitaly E. Lavrov : Sane behaviour on tty hangup. * Alexey Kuznetsov : Cleanup interfaces to tty&netdevice modules. */ #define SL_CHECK_TRANSMIT #include <linux/config.h> #include <linux/module.h> #include <asm/system.h> #include <asm/uaccess.h> #include <asm/bitops.h> #include <linux/string.h> #include <linux/mm.h> #include <linux/interrupt.h> #include <linux/in.h> #include <linux/tty.h> #include <linux/errno.h> #include <linux/netdevice.h> #include <linux/etherdevice.h> #include <linux/skbuff.h> #include <linux/rtnetlink.h> #include <linux/if_arp.h> #include <linux/if_slip.h> #include <linux/init.h> #include "slip.h" #ifdef CONFIG_INET #include <linux/ip.h> #include <linux/tcp.h> #include <net/slhc_vj.h> #endif #define SLIP_VERSION "0.8.4-NET3.019-NEWTTY" static struct net_device **slip_devs; int slip_maxdev = SL_NRUNIT; /* Can be overridden with insmod! */ MODULE_PARM(slip_maxdev, "i"); MODULE_PARM_DESC(slip_maxdev, "Maximum number of slip devices"); static int slip_esc(unsigned char *p, unsigned char *d, int len); static void slip_unesc(struct slip *sl, unsigned char c); #ifdef CONFIG_SLIP_MODE_SLIP6 static int slip_esc6(unsigned char *p, unsigned char *d, int len); static void slip_unesc6(struct slip *sl, unsigned char c); #endif #ifdef CONFIG_SLIP_SMART static void sl_keepalive(unsigned long sls); static void sl_outfill(unsigned long sls); static int sl_ioctl(struct net_device *dev,struct ifreq *rq,int cmd); #endif /******************************** * Buffer administration routines: * sl_alloc_bufs() * sl_free_bufs() * sl_realloc_bufs() * * NOTE: sl_realloc_bufs != sl_free_bufs + sl_alloc_bufs, because * sl_realloc_bufs provides strong atomicity and reallocation * on actively running device. 
*********************************/ /* Allocate channel buffers. */ static int sl_alloc_bufs(struct slip *sl, int mtu) { int err = -ENOBUFS; unsigned long len; char * rbuff = NULL; char * xbuff = NULL; #ifdef SL_INCLUDE_CSLIP char * cbuff = NULL; struct slcompress *slcomp = NULL; #endif /* * Allocate the SLIP frame buffers: * * rbuff Receive buffer. * xbuff Transmit buffer. * cbuff Temporary compression buffer. */ len = mtu * 2; /* * allow for arrival of larger UDP packets, even if we say not to * also fixes a bug in which SunOS sends 512-byte packets even with * an MSS of 128 */ if (len < 576 * 2) len = 576 * 2; rbuff = kmalloc(len + 4, GFP_KERNEL); if (rbuff == NULL) goto err_exit; xbuff = kmalloc(len + 4, GFP_KERNEL); if (xbuff == NULL) goto err_exit; #ifdef SL_INCLUDE_CSLIP cbuff = kmalloc(len + 4, GFP_KERNEL); if (cbuff == NULL) goto err_exit; slcomp = slhc_init(16, 16); if (slcomp == NULL) goto err_exit; #endif spin_lock_bh(&sl->lock); if (sl->tty == NULL) { spin_unlock_bh(&sl->lock); err = -ENODEV; goto err_exit; } sl->mtu = mtu; sl->buffsize = len; sl->rcount = 0; sl->xleft = 0; rbuff = xchg(&sl->rbuff, rbuff); xbuff = xchg(&sl->xbuff, xbuff); #ifdef SL_INCLUDE_CSLIP cbuff = xchg(&sl->cbuff, cbuff); slcomp = xchg(&sl->slcomp, slcomp); #ifdef CONFIG_SLIP_MODE_SLIP6 sl->xdata = 0; sl->xbits = 0; #endif #endif spin_unlock_bh(&sl->lock); err = 0; /* Cleanup */ err_exit: #ifdef SL_INCLUDE_CSLIP if (cbuff) kfree(cbuff); if (slcomp) slhc_free(slcomp); #endif if (xbuff) kfree(xbuff); if (rbuff) kfree(rbuff); return err; } /* Free a SLIP channel buffers. */ static void sl_free_bufs(struct slip *sl) { void * tmp; /* Free all SLIP frame buffers. */ if ((tmp = xchg(&sl->rbuff, NULL)) != NULL) kfree(tmp); if ((tmp = xchg(&sl->xbuff, NULL)) != NULL) kfree(tmp); #ifdef SL_INCLUDE_CSLIP if ((tmp = xchg(&sl->cbuff, NULL)) != NULL) kfree(tmp); if ((tmp = xchg(&sl->slcomp, NULL)) != NULL) slhc_free(tmp); #endif } /* Reallocate slip channel buffers. 
*/ static int sl_realloc_bufs(struct slip *sl, int mtu) { int err = 0; struct net_device *dev = sl->dev; unsigned char *xbuff, *rbuff; #ifdef SL_INCLUDE_CSLIP unsigned char *cbuff; #endif int len = mtu * 2; /* * allow for arrival of larger UDP packets, even if we say not to * also fixes a bug in which SunOS sends 512-byte packets even with * an MSS of 128 */ if (len < 576 * 2) len = 576 * 2; xbuff = (unsigned char *) kmalloc (len + 4, GFP_ATOMIC); rbuff = (unsigned char *) kmalloc (len + 4, GFP_ATOMIC); #ifdef SL_INCLUDE_CSLIP cbuff = (unsigned char *) kmalloc (len + 4, GFP_ATOMIC); #endif #ifdef SL_INCLUDE_CSLIP if (xbuff == NULL || rbuff == NULL || cbuff == NULL) { #else if (xbuff == NULL || rbuff == NULL) { #endif if (mtu >= sl->mtu) { printk(KERN_WARNING "%s: unable to grow slip buffers, MTU change cancelled.\n", dev->name); err = -ENOBUFS; } goto done; } spin_lock_bh(&sl->lock); err = -ENODEV; if (sl->tty == NULL) goto done_on_bh; xbuff = xchg(&sl->xbuff, xbuff); rbuff = xchg(&sl->rbuff, rbuff); #ifdef SL_INCLUDE_CSLIP cbuff = xchg(&sl->cbuff, cbuff); #endif if (sl->xleft) { if (sl->xleft <= len) { memcpy(sl->xbuff, sl->xhead, sl->xleft); } else { sl->xleft = 0; sl->tx_dropped++; } } sl->xhead = sl->xbuff; if (sl->rcount) { if (sl->rcount <= len) { memcpy(sl->rbuff, rbuff, sl->rcount); } else { sl->rcount = 0; sl->rx_over_errors++; set_bit(SLF_ERROR, &sl->flags); } } sl->mtu = mtu; dev->mtu = mtu; sl->buffsize = len; err = 0; done_on_bh: spin_unlock_bh(&sl->lock); done: if (xbuff) kfree(xbuff); if (rbuff) kfree(rbuff); #ifdef SL_INCLUDE_CSLIP if (cbuff) kfree(cbuff); #endif return err; } /* Set the "sending" flag. This must be atomic hence the set_bit. */ static inline void sl_lock(struct slip *sl) { netif_stop_queue(sl->dev); } /* Clear the "sending" flag. This must be atomic, hence the ASM. */ static inline void sl_unlock(struct slip *sl) { netif_wake_queue(sl->dev); } /* Send one completely decapsulated IP datagram to the IP layer. 
*/ static void sl_bump(struct slip *sl) { struct sk_buff *skb; int count; count = sl->rcount; #ifdef SL_INCLUDE_CSLIP if (sl->mode & (SL_MODE_ADAPTIVE | SL_MODE_CSLIP)) { unsigned char c; if ((c = sl->rbuff[0]) & SL_TYPE_COMPRESSED_TCP) { /* ignore compressed packets when CSLIP is off */ if (!(sl->mode & SL_MODE_CSLIP)) { printk(KERN_WARNING "%s: compressed packet ignored\n", sl->dev->name); return; } /* make sure we've reserved enough space for uncompress to use */ if (count + 80 > sl->buffsize) { sl->rx_over_errors++; return; } count = slhc_uncompress(sl->slcomp, sl->rbuff, count); if (count <= 0) { return; } } else if (c >= SL_TYPE_UNCOMPRESSED_TCP) { if (!(sl->mode & SL_MODE_CSLIP)) { /* turn on header compression */ sl->mode |= SL_MODE_CSLIP; sl->mode &= ~SL_MODE_ADAPTIVE; printk(KERN_INFO "%s: header compression turned on\n", sl->dev->name); } sl->rbuff[0] &= 0x4f; if (slhc_remember(sl->slcomp, sl->rbuff, count) <= 0) { return; } } } #endif /* SL_INCLUDE_CSLIP */ sl->rx_bytes+=count; skb = dev_alloc_skb(count); if (skb == NULL) { printk(KERN_WARNING "%s: memory squeeze, dropping packet.\n", sl->dev->name); sl->rx_dropped++; return; } skb->dev = sl->dev; memcpy(skb_put(skb,count), sl->rbuff, count); skb->mac.raw=skb->data; skb->protocol=htons(ETH_P_IP); netif_rx(skb); sl->dev->last_rx = jiffies; sl->rx_packets++; } /* Encapsulate one IP datagram and stuff into a TTY queue. */ static void sl_encaps(struct slip *sl, unsigned char *icp, int len) { unsigned char *p; int actual, count; if (len > sl->mtu) { /* Sigh, shouldn't occur BUT ... 
*/ printk(KERN_WARNING "%s: truncating oversized transmit packet!\n", sl->dev->name); sl->tx_dropped++; sl_unlock(sl); return; } p = icp; #ifdef SL_INCLUDE_CSLIP if (sl->mode & SL_MODE_CSLIP) { len = slhc_compress(sl->slcomp, p, len, sl->cbuff, &p, 1); } #endif #ifdef CONFIG_SLIP_MODE_SLIP6 if(sl->mode & SL_MODE_SLIP6) count = slip_esc6(p, (unsigned char *) sl->xbuff, len); else #endif count = slip_esc(p, (unsigned char *) sl->xbuff, len); /* Order of next two lines is *very* important. * When we are sending a little amount of data, * the transfer may be completed inside driver.write() * routine, because it's running with interrupts enabled. * In this case we *never* got WRITE_WAKEUP event, * if we did not request it before write operation. * 14 Oct 1994 Dmitry Gorodchanin. */ sl->tty->flags |= (1 << TTY_DO_WRITE_WAKEUP); actual = sl->tty->driver->write(sl->tty, 0, sl->xbuff, count); #ifdef SL_CHECK_TRANSMIT sl->dev->trans_start = jiffies; #endif sl->xleft = count - actual; sl->xhead = sl->xbuff + actual; #ifdef CONFIG_SLIP_SMART /* VSV */ clear_bit(SLF_OUTWAIT, &sl->flags); /* reset outfill flag */ #endif } /* * Called by the driver when there's room for more data. If we have * more packets to send, we send them here. */ static void slip_write_wakeup(struct tty_struct *tty) { int actual; struct slip *sl = (struct slip *) tty->disc_data; /* First make sure we're connected. 
*/ if (!sl || sl->magic != SLIP_MAGIC || !netif_running(sl->dev)) { return; } if (sl->xleft <= 0) { /* Now serial buffer is almost free & we can start * transmission of another packet */ sl->tx_packets++; tty->flags &= ~(1 << TTY_DO_WRITE_WAKEUP); sl_unlock(sl); return; } actual = tty->driver->write(tty, 0, sl->xhead, sl->xleft); sl->xleft -= actual; sl->xhead += actual; } static void sl_tx_timeout(struct net_device *dev) { struct slip *sl = (struct slip*)(dev->priv); spin_lock(&sl->lock); if (netif_queue_stopped(dev)) { struct slip *sl = (struct slip*)(dev->priv); if (!netif_running(dev)) goto out; /* May be we must check transmitter timeout here ? * 14 Oct 1994 Dmitry Gorodchanin. */ #ifdef SL_CHECK_TRANSMIT if (time_before(jiffies, dev->trans_start + 20 * HZ)) { /* 20 sec timeout not reached */ goto out; } printk(KERN_WARNING "%s: transmit timed out, %s?\n", dev->name, (sl->tty->driver->chars_in_buffer(sl->tty) || sl->xleft) ? "bad line quality" : "driver error"); sl->xleft = 0; sl->tty->flags &= ~(1 << TTY_DO_WRITE_WAKEUP); sl_unlock(sl); #endif } out: spin_unlock(&sl->lock); } /* Encapsulate an IP datagram and kick it into a TTY queue. */ static int sl_xmit(struct sk_buff *skb, struct net_device *dev) { struct slip *sl = (struct slip*)(dev->priv); spin_lock(&sl->lock); if (!netif_running(dev)) { spin_unlock(&sl->lock); printk(KERN_WARNING "%s: xmit call when iface is down\n", dev->name); dev_kfree_skb(skb); return 0; } if (sl->tty == NULL) { spin_unlock(&sl->lock); dev_kfree_skb(skb); return 0; } sl_lock(sl); sl->tx_bytes+=skb->len; sl_encaps(sl, skb->data, skb->len); spin_unlock(&sl->lock); dev_kfree_skb(skb); return 0; } /****************************************** * Routines looking at netdevice side. ******************************************/ /* Netdevice UP -> DOWN routine */ static int sl_close(struct net_device *dev) { struct slip *sl = (struct slip*)(dev->priv); spin_lock_bh(&sl->lock); if (sl->tty) { /* TTY discipline is running. 
*/ sl->tty->flags &= ~(1 << TTY_DO_WRITE_WAKEUP); } netif_stop_queue(dev); sl->rcount = 0; sl->xleft = 0; spin_unlock_bh(&sl->lock); return 0; } /* Netdevice DOWN -> UP routine */ static int sl_open(struct net_device *dev) { struct slip *sl = (struct slip*)(dev->priv); if (sl->tty==NULL) return -ENODEV; sl->flags &= (1 << SLF_INUSE); netif_start_queue(dev); return 0; } /* Netdevice change MTU request */ static int sl_change_mtu(struct net_device *dev, int new_mtu) { struct slip *sl = (struct slip*)(dev->priv); if (new_mtu < 68 || new_mtu > 65534) return -EINVAL; if (new_mtu != dev->mtu) return sl_realloc_bufs(sl, new_mtu); return 0; } /* Netdevice get statistics request */ static struct net_device_stats * sl_get_stats(struct net_device *dev) { static struct net_device_stats stats; struct slip *sl = (struct slip*)(dev->priv); #ifdef SL_INCLUDE_CSLIP struct slcompress *comp; #endif memset(&stats, 0, sizeof(struct net_device_stats)); stats.rx_packets = sl->rx_packets; stats.tx_packets = sl->tx_packets; stats.rx_bytes = sl->rx_bytes; stats.tx_bytes = sl->tx_bytes; stats.rx_dropped = sl->rx_dropped; stats.tx_dropped = sl->tx_dropped; stats.tx_errors = sl->tx_errors; stats.rx_errors = sl->rx_errors; stats.rx_over_errors = sl->rx_over_errors; #ifdef SL_INCLUDE_CSLIP stats.rx_fifo_errors = sl->rx_compressed; stats.tx_fifo_errors = sl->tx_compressed; stats.collisions = sl->tx_misses; comp = sl->slcomp; if (comp) { stats.rx_fifo_errors += comp->sls_i_compressed; stats.rx_dropped += comp->sls_i_tossed; stats.tx_fifo_errors += comp->sls_o_compressed; stats.collisions += comp->sls_o_misses; } #endif /* CONFIG_INET */ return (&stats); } /* Netdevice register callback */ static int sl_init(struct net_device *dev) { struct slip *sl = (struct slip*)(dev->priv); /* * Finish setting up the DEVICE info. 
*/ dev->mtu = sl->mtu; dev->type = ARPHRD_SLIP + sl->mode; #ifdef SL_CHECK_TRANSMIT dev->tx_timeout = sl_tx_timeout; dev->watchdog_timeo = 20*HZ; #endif return 0; } static void sl_uninit(struct net_device *dev) { struct slip *sl = (struct slip*)(dev->priv); sl_free_bufs(sl); } static void sl_setup(struct net_device *dev) { dev->init = sl_init; dev->uninit = sl_uninit; dev->open = sl_open; dev->destructor = (void (*)(struct net_device *))kfree; dev->stop = sl_close; dev->get_stats = sl_get_stats; dev->change_mtu = sl_change_mtu; dev->hard_start_xmit = sl_xmit; #ifdef CONFIG_SLIP_SMART dev->do_ioctl = sl_ioctl; #endif dev->hard_header_len = 0; dev->addr_len = 0; dev->tx_queue_len = 10; SET_MODULE_OWNER(dev); /* New-style flags. */ dev->flags = IFF_NOARP|IFF_POINTOPOINT|IFF_MULTICAST; } /****************************************** Routines looking at TTY side. ******************************************/ static int slip_receive_room(struct tty_struct *tty) { return 65536; /* We can handle an infinite amount of data. :-) */ } /* * Handle the 'receiver data ready' interrupt. * This function is called by the 'tty_io' module in the kernel when * a block of SLIP data has been received, which can now be decapsulated * and sent on to some IP layer for further processing. */ static void slip_receive_buf(struct tty_struct *tty, const unsigned char *cp, char *fp, int count) { struct slip *sl = (struct slip *) tty->disc_data; if (!sl || sl->magic != SLIP_MAGIC || !netif_running(sl->dev)) return; /* Read the characters out of the buffer */ while (count--) { if (fp && *fp++) { if (!test_and_set_bit(SLF_ERROR, &sl->flags)) { sl->rx_errors++; } cp++; continue; } #ifdef CONFIG_SLIP_MODE_SLIP6 if (sl->mode & SL_MODE_SLIP6) slip_unesc6(sl, *cp++); else #endif slip_unesc(sl, *cp++); } } /************************************ * slip_open helper routines. 
************************************/ /* Collect hanged up channels */ static void sl_sync(void) { int i; struct net_device *dev; struct slip *sl; for (i = 0; i < slip_maxdev; i++) { if ((dev = slip_devs[i]) == NULL) break; sl = dev->priv; if (sl->tty || sl->leased) continue; if (dev->flags&IFF_UP) dev_close(dev); } } /* Find a free SLIP channel, and link in this `tty' line. */ static struct slip * sl_alloc(dev_t line) { int i; int sel = -1; int score = -1; struct net_device *dev = NULL; struct slip *sl; if (slip_devs == NULL) return NULL; /* Master array missing ! */ for (i = 0; i < slip_maxdev; i++) { dev = slip_devs[i]; if (dev == NULL) break; sl = dev->priv; if (sl->leased) { if (sl->line != line) continue; if (sl->tty) return NULL; /* Clear ESCAPE & ERROR flags */ sl->flags &= (1 << SLF_INUSE); return sl; } if (sl->tty) continue; if (current->pid == sl->pid) { if (sl->line == line && score < 3) { sel = i; score = 3; continue; } if (score < 2) { sel = i; score = 2; } continue; } if (sl->line == line && score < 1) { sel = i; score = 1; continue; } if (score < 0) { sel = i; score = 0; } } if (sel >= 0) { i = sel; dev = slip_devs[i]; if (score > 1) { sl = dev->priv; sl->flags &= (1 << SLF_INUSE); return sl; } } /* Sorry, too many, all slots in use */ if (i >= slip_maxdev) return NULL; if (dev) { sl = dev->priv; if (test_bit(SLF_INUSE, &sl->flags)) { unregister_netdevice(dev); dev = NULL; slip_devs[i] = NULL; } } if (!dev) { char name[IFNAMSIZ]; sprintf(name, "sl%d", i); dev = alloc_netdev(sizeof(*sl), name, sl_setup); if (!dev) return NULL; dev->base_addr = i; } sl = dev->priv; /* Initialize channel control data */ sl->magic = SLIP_MAGIC; sl->dev = dev; spin_lock_init(&sl->lock); sl->mode = SL_MODE_DEFAULT; #ifdef CONFIG_SLIP_SMART init_timer(&sl->keepalive_timer); /* initialize timer_list struct */ sl->keepalive_timer.data=(unsigned long)sl; sl->keepalive_timer.function=sl_keepalive; init_timer(&sl->outfill_timer); sl->outfill_timer.data=(unsigned long)sl; 
sl->outfill_timer.function=sl_outfill; #endif slip_devs[i] = dev; return sl; } /* * Open the high-level part of the SLIP channel. * This function is called by the TTY module when the * SLIP line discipline is called for. Because we are * sure the tty line exists, we only have to link it to * a free SLIP channel... */ static int slip_open(struct tty_struct *tty) { struct slip *sl; int err; if(!capable(CAP_NET_ADMIN)) return -EPERM; /* RTnetlink lock is misused here to serialize concurrent opens of slip channels. There are better ways, but it is the simplest one. */ rtnl_lock(); /* Collect hanged up channels. */ sl_sync(); sl = (struct slip *) tty->disc_data; err = -EEXIST; /* First make sure we're not already connected. */ if (sl && sl->magic == SLIP_MAGIC) goto err_exit; /* OK. Find a free SLIP channel to use. */ err = -ENFILE; if ((sl = sl_alloc(tty->device)) == NULL) goto err_exit; sl->tty = tty; tty->disc_data = sl; sl->line = tty->device; sl->pid = current->pid; if (tty->driver->flush_buffer) tty->driver->flush_buffer(tty); if (tty->ldisc.flush_buffer) tty->ldisc.flush_buffer(tty); if (!test_bit(SLF_INUSE, &sl->flags)) { /* Perform the low-level SLIP initialization. */ if ((err = sl_alloc_bufs(sl, SL_MTU)) != 0) goto err_free_chan; set_bit(SLF_INUSE, &sl->flags); if ((err = register_netdevice(sl->dev))) goto err_free_bufs; } #ifdef CONFIG_SLIP_SMART if (sl->keepalive) { sl->keepalive_timer.expires=jiffies+sl->keepalive*HZ; add_timer (&sl->keepalive_timer); } if (sl->outfill) { sl->outfill_timer.expires=jiffies+sl->outfill*HZ; add_timer (&sl->outfill_timer); } #endif /* Done. We have linked the TTY line to a channel. */ rtnl_unlock(); return sl->dev->base_addr; err_free_bufs: sl_free_bufs(sl); err_free_chan: sl->tty = NULL; tty->disc_data = NULL; clear_bit(SLF_INUSE, &sl->flags); err_exit: rtnl_unlock(); /* Count references from TTY module */ return err; } /* Let me to blame a bit. 1. TTY module calls this funstion on soft interrupt. 2. 
TTY module calls this function WITH MASKED INTERRUPTS! 3. TTY module does not notify us about line discipline shutdown, Seems, now it is clean. The solution is to consider netdevice and line discipline sides as two independent threads. By-product (not desired): sl? does not feel hangups and remains open. It is supposed, that user level program (dip, diald, slattach...) will catch SIGHUP and make the rest of work. I see no way to make more with current tty code. --ANK */ /* * Close down a SLIP channel. * This means flushing out any pending queues, and then restoring the * TTY line discipline to what it was before it got hooked to SLIP * (which usually is TTY again). */ static void slip_close(struct tty_struct *tty) { struct slip *sl = (struct slip *) tty->disc_data; /* First make sure we're connected. */ if (!sl || sl->magic != SLIP_MAGIC || sl->tty != tty) return; tty->disc_data = 0; sl->tty = NULL; if (!sl->leased) sl->line = 0; /* VSV = very important to remove timers */ #ifdef CONFIG_SLIP_SMART del_timer_sync(&sl->keepalive_timer); del_timer_sync(&sl->outfill_timer); #endif /* Count references from TTY module */ } /************************************************************************ * STANDARD SLIP ENCAPSULATION * ************************************************************************/ int slip_esc(unsigned char *s, unsigned char *d, int len) { unsigned char *ptr = d; unsigned char c; /* * Send an initial END character to flush out any * data that may have accumulated in the receiver * due to line noise. */ *ptr++ = END; /* * For each byte in the packet, send the appropriate * character sequence, according to the SLIP protocol. 
*/ while (len-- > 0) { switch(c = *s++) { case END: *ptr++ = ESC; *ptr++ = ESC_END; break; case ESC: *ptr++ = ESC; *ptr++ = ESC_ESC; break; default: *ptr++ = c; break; } } *ptr++ = END; return (ptr - d); } static void slip_unesc(struct slip *sl, unsigned char s) { switch(s) { case END: #ifdef CONFIG_SLIP_SMART /* drop keeptest bit = VSV */ if (test_bit(SLF_KEEPTEST, &sl->flags)) clear_bit(SLF_KEEPTEST, &sl->flags); #endif if (!test_and_clear_bit(SLF_ERROR, &sl->flags) && (sl->rcount > 2)) { sl_bump(sl); } clear_bit(SLF_ESCAPE, &sl->flags); sl->rcount = 0; return; case ESC: set_bit(SLF_ESCAPE, &sl->flags); return; case ESC_ESC: if (test_and_clear_bit(SLF_ESCAPE, &sl->flags)) { s = ESC; } break; case ESC_END: if (test_and_clear_bit(SLF_ESCAPE, &sl->flags)) { s = END; } break; } if (!test_bit(SLF_ERROR, &sl->flags)) { if (sl->rcount < sl->buffsize) { sl->rbuff[sl->rcount++] = s; return; } sl->rx_over_errors++; set_bit(SLF_ERROR, &sl->flags); } } #ifdef CONFIG_SLIP_MODE_SLIP6 /************************************************************************ * 6 BIT SLIP ENCAPSULATION * ************************************************************************/ int slip_esc6(unsigned char *s, unsigned char *d, int len) { unsigned char *ptr = d; unsigned char c; int i; unsigned short v = 0; short bits = 0; /* * Send an initial END character to flush out any * data that may have accumulated in the receiver * due to line noise. 
*/ *ptr++ = 0x70; /* * Encode the packet into printable ascii characters */ for (i = 0; i < len; ++i) { v = (v << 8) | s[i]; bits += 8; while (bits >= 6) { bits -= 6; c = 0x30 + ((v >> bits) & 0x3F); *ptr++ = c; } } if (bits) { c = 0x30 + ((v << (6 - bits)) & 0x3F); *ptr++ = c; } *ptr++ = 0x70; return ptr - d; } void slip_unesc6(struct slip *sl, unsigned char s) { unsigned char c; if (s == 0x70) { #ifdef CONFIG_SLIP_SMART /* drop keeptest bit = VSV */ if (test_bit(SLF_KEEPTEST, &sl->flags)) clear_bit(SLF_KEEPTEST, &sl->flags); #endif if (!test_and_clear_bit(SLF_ERROR, &sl->flags) && (sl->rcount > 2)) { sl_bump(sl); } sl->rcount = 0; sl->xbits = 0; sl->xdata = 0; } else if (s >= 0x30 && s < 0x70) { sl->xdata = (sl->xdata << 6) | ((s - 0x30) & 0x3F); sl->xbits += 6; if (sl->xbits >= 8) { sl->xbits -= 8; c = (unsigned char)(sl->xdata >> sl->xbits); if (!test_bit(SLF_ERROR, &sl->flags)) { if (sl->rcount < sl->buffsize) { sl->rbuff[sl->rcount++] = c; return; } sl->rx_over_errors++; set_bit(SLF_ERROR, &sl->flags); } } } } #endif /* CONFIG_SLIP_MODE_SLIP6 */ /* Perform I/O control on an active SLIP channel. */ static int slip_ioctl(struct tty_struct *tty, struct file *file, unsigned int cmd, unsigned long arg) { struct slip *sl = (struct slip *) tty->disc_data; unsigned int tmp; /* First make sure we're connected. 
*/ if (!sl || sl->magic != SLIP_MAGIC) { return -EINVAL; } switch(cmd) { case SIOCGIFNAME: tmp = strlen(sl->dev->name) + 1; if (copy_to_user((void *)arg, sl->dev->name, tmp)) return -EFAULT; return 0; case SIOCGIFENCAP: if (put_user(sl->mode, (int *)arg)) return -EFAULT; return 0; case SIOCSIFENCAP: if (get_user(tmp,(int *)arg)) return -EFAULT; #ifndef SL_INCLUDE_CSLIP if (tmp & (SL_MODE_CSLIP|SL_MODE_ADAPTIVE)) { return -EINVAL; } #else if ((tmp & (SL_MODE_ADAPTIVE | SL_MODE_CSLIP)) == (SL_MODE_ADAPTIVE | SL_MODE_CSLIP)) { /* return -EINVAL; */ tmp &= ~SL_MODE_ADAPTIVE; } #endif #ifndef CONFIG_SLIP_MODE_SLIP6 if (tmp & SL_MODE_SLIP6) { return -EINVAL; } #endif sl->mode = tmp; sl->dev->type = ARPHRD_SLIP+sl->mode; return 0; case SIOCSIFHWADDR: return -EINVAL; #ifdef CONFIG_SLIP_SMART /* VSV changes start here */ case SIOCSKEEPALIVE: if (get_user(tmp,(int *)arg)) return -EFAULT; if (tmp > 255) /* max for unchar */ return -EINVAL; spin_lock_bh(&sl->lock); if (!sl->tty) { spin_unlock_bh(&sl->lock); return -ENODEV; } if ((sl->keepalive = (unchar) tmp) != 0) { mod_timer(&sl->keepalive_timer, jiffies+sl->keepalive*HZ); set_bit(SLF_KEEPTEST, &sl->flags); } else { del_timer (&sl->keepalive_timer); } spin_unlock_bh(&sl->lock); return 0; case SIOCGKEEPALIVE: if (put_user(sl->keepalive, (int *)arg)) return -EFAULT; return 0; case SIOCSOUTFILL: if (get_user(tmp,(int *)arg)) return -EFAULT; if (tmp > 255) /* max for unchar */ return -EINVAL; spin_lock_bh(&sl->lock); if (!sl->tty) { spin_unlock_bh(&sl->lock); return -ENODEV; } if ((sl->outfill = (unchar) tmp) != 0){ mod_timer(&sl->outfill_timer, jiffies+sl->outfill*HZ); set_bit(SLF_OUTWAIT, &sl->flags); } else { del_timer (&sl->outfill_timer); } spin_unlock_bh(&sl->lock); return 0; case SIOCGOUTFILL: if (put_user(sl->outfill, (int *)arg)) return -EFAULT; return 0; /* VSV changes end */ #endif /* Allow stty to read, but not set, the serial port */ case TCGETS: case TCGETA: return n_tty_ioctl(tty, file, cmd, arg); default: return 
-ENOIOCTLCMD; } } /* VSV changes start here */ #ifdef CONFIG_SLIP_SMART /* function do_ioctl called from net/core/dev.c to allow get/set outfill/keepalive parameter by ifconfig */ static int sl_ioctl(struct net_device *dev,struct ifreq *rq,int cmd) { struct slip *sl = (struct slip*)(dev->priv); if (sl == NULL) /* Allocation failed ?? */ return -ENODEV; spin_lock_bh(&sl->lock); if (!sl->tty) { spin_unlock_bh(&sl->lock); return -ENODEV; } switch(cmd){ case SIOCSKEEPALIVE: /* max for unchar */ if (((unsigned int)((unsigned long)rq->ifr_data)) > 255) { spin_unlock_bh(&sl->lock); return -EINVAL; } sl->keepalive = (unchar) ((unsigned long)rq->ifr_data); if (sl->keepalive != 0) { sl->keepalive_timer.expires=jiffies+sl->keepalive*HZ; mod_timer(&sl->keepalive_timer, jiffies+sl->keepalive*HZ); set_bit(SLF_KEEPTEST, &sl->flags); } else { del_timer(&sl->keepalive_timer); } break; case SIOCGKEEPALIVE: rq->ifr_data=(caddr_t)((unsigned long)sl->keepalive); break; case SIOCSOUTFILL: if (((unsigned)((unsigned long)rq->ifr_data)) > 255) { /* max for unchar */ spin_unlock_bh(&sl->lock); return -EINVAL; } if ((sl->outfill = (unchar)((unsigned long) rq->ifr_data)) != 0){ mod_timer(&sl->outfill_timer, jiffies+sl->outfill*HZ); set_bit(SLF_OUTWAIT, &sl->flags); } else { del_timer (&sl->outfill_timer); } break; case SIOCGOUTFILL: rq->ifr_data=(caddr_t)((unsigned long)sl->outfill); break; case SIOCSLEASE: /* Resolve race condition, when ioctl'ing hanged up and opened by another process device. 
*/ if (sl->tty != current->tty && sl->pid != current->pid) { spin_unlock_bh(&sl->lock); return -EPERM; } sl->leased = 0; if ((unsigned long)rq->ifr_data) sl->leased = 1; break; case SIOCGLEASE: rq->ifr_data=(caddr_t)((unsigned long)sl->leased); }; spin_unlock_bh(&sl->lock); return 0; } #endif /* VSV changes end */ static struct tty_ldisc sl_ldisc = { .owner = THIS_MODULE, .magic = TTY_LDISC_MAGIC, .name = "slip", .open = slip_open, .close = slip_close, .ioctl = slip_ioctl, .receive_buf = slip_receive_buf, .receive_room = slip_receive_room, .write_wakeup = slip_write_wakeup, }; static int __init slip_init(void) { int status; if (slip_maxdev < 4) slip_maxdev = 4; /* Sanity */ printk(KERN_INFO "SLIP: version %s (dynamic channels, max=%d)" #ifdef CONFIG_SLIP_MODE_SLIP6 " (6 bit encapsulation enabled)" #endif ".\n", SLIP_VERSION, slip_maxdev ); #if defined(SL_INCLUDE_CSLIP) printk(KERN_INFO "CSLIP: code copyright 1989 Regents of the University of California.\n"); #endif #ifdef CONFIG_SLIP_SMART printk(KERN_INFO "SLIP linefill/keepalive option.\n"); #endif slip_devs = kmalloc(sizeof(struct net_device *)*slip_maxdev, GFP_KERNEL); if (!slip_devs) { printk(KERN_ERR "SLIP: Can't allocate slip devices array! Uaargh! (-> No SLIP available)\n"); return -ENOMEM; } /* Clear the pointer array, we allocate devices when we need them */ memset(slip_devs, 0, sizeof(struct net_device *)*slip_maxdev); /* Fill in our line protocol discipline, and register it */ if ((status = tty_register_ldisc(N_SLIP, &sl_ldisc)) != 0) { printk(KERN_ERR "SLIP: can't register line discipline (err = %d)\n", status); } return status; } static void __exit slip_exit(void) { int i; struct net_device *dev; struct slip *sl; unsigned long timeout = jiffies + HZ; int busy = 0; if (slip_devs == NULL) return; /* First of all: check for active disciplines and hangup them. 
*/ do { if (busy) { current->state = TASK_INTERRUPTIBLE; schedule_timeout(HZ / 10); current->state = TASK_RUNNING; } busy = 0; for (i = 0; i < slip_maxdev; i++) { dev = slip_devs[i]; if (!dev) continue; sl = dev->priv; spin_lock_bh(&sl->lock); if (sl->tty) { busy++; tty_hangup(sl->tty); } spin_unlock_bh(&sl->lock); } } while (busy && time_before(jiffies, timeout)); for (i = 0; i < slip_maxdev; i++) { dev = slip_devs[i]; if (!dev) continue; slip_devs[i] = NULL; sl = dev->priv; if (sl->tty) { printk(KERN_ERR "%s: tty discipline still running\n", dev->name); /* Intentionally leak the control block. */ dev->destructor = NULL; } unregister_netdev(dev); } kfree(slip_devs); slip_devs = NULL; if ((i = tty_register_ldisc(N_SLIP, NULL))) { printk(KERN_ERR "SLIP: can't unregister line discipline (err = %d)\n", i); } } module_init(slip_init); module_exit(slip_exit); #ifdef CONFIG_SLIP_SMART /* * This is start of the code for multislip style line checking * added by Stanislav Voronyi. All changes before marked VSV */ static void sl_outfill(unsigned long sls) { struct slip *sl=(struct slip *)sls; spin_lock(&sl->lock); if (sl->tty == NULL) goto out; if(sl->outfill) { if( test_bit(SLF_OUTWAIT, &sl->flags) ) { /* no packets were transmitted, do outfill */ #ifdef CONFIG_SLIP_MODE_SLIP6 unsigned char s = (sl->mode & SL_MODE_SLIP6)?0x70:END; #else unsigned char s = END; #endif /* put END into tty queue. Is it right ??? 
*/ if (!netif_queue_stopped(sl->dev)) { /* if device busy no outfill */ sl->tty->driver->write(sl->tty, 0, &s, 1); } } else set_bit(SLF_OUTWAIT, &sl->flags); mod_timer(&sl->outfill_timer, jiffies+sl->outfill*HZ); } out: spin_unlock(&sl->lock); } static void sl_keepalive(unsigned long sls) { struct slip *sl=(struct slip *)sls; spin_lock(&sl->lock); if (sl->tty == NULL) goto out; if( sl->keepalive) { if(test_bit(SLF_KEEPTEST, &sl->flags)) { /* keepalive still high :(, we must hangup */ if( sl->outfill ) /* outfill timer must be deleted too */ (void)del_timer(&sl->outfill_timer); printk(KERN_DEBUG "%s: no packets received during keepalive timeout, hangup.\n", sl->dev->name); tty_hangup(sl->tty); /* this must hangup tty & close slip */ /* I think we need not something else */ goto out; } else set_bit(SLF_KEEPTEST, &sl->flags); mod_timer(&sl->keepalive_timer, jiffies+sl->keepalive*HZ); } out: spin_unlock(&sl->lock); } #endif MODULE_LICENSE("GPL");
codinuum/cca
samples/c/1/slip.c
C
apache-2.0
35,565
//// [lastPropertyInLiteralWins.ts] interface Thing { thunk: (str: string) => void; } function test(thing: Thing) { thing.thunk("str"); } test({ // Should error, as last one wins, and is wrong type thunk: (str: string) => {}, thunk: (num: number) => {} }); test({ // Should be OK. Last 'thunk' is of correct type thunk: (num: number) => {}, thunk: (str: string) => {} }); //// [lastPropertyInLiteralWins.js] function test(thing) { thing.thunk("str"); } test({ thunk: function (str) { }, thunk: function (num) { } }); test({ thunk: function (num) { }, thunk: function (str) { } });
freedot/tstolua
tests/baselines/reference/lastPropertyInLiteralWins.js
JavaScript
apache-2.0
629
/* * Copyright 2021 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.score.buildin.hardmediumsoftbigdecimal; import java.math.BigDecimal; import java.util.LinkedHashMap; import java.util.Map; import org.kie.api.definition.rule.Rule; import org.kie.api.runtime.rule.RuleContext; import org.optaplanner.core.api.domain.constraintweight.ConstraintConfiguration; import org.optaplanner.core.api.domain.constraintweight.ConstraintWeight; import org.optaplanner.core.api.score.buildin.hardmediumsoftbigdecimal.HardMediumSoftBigDecimalScore; import org.optaplanner.core.api.score.buildin.hardmediumsoftbigdecimal.HardMediumSoftBigDecimalScoreHolder; import org.optaplanner.core.impl.score.holder.AbstractScoreHolder; /** * @see HardMediumSoftBigDecimalScore */ public final class HardMediumSoftBigDecimalScoreHolderImpl extends AbstractScoreHolder<HardMediumSoftBigDecimalScore> implements HardMediumSoftBigDecimalScoreHolder { protected final Map<Rule, BigDecimalMatchExecutor> matchExecutorByNumberMap = new LinkedHashMap<>(); /** Slower than {@link #matchExecutorByNumberMap} */ protected final Map<Rule, ScoreMatchExecutor<HardMediumSoftBigDecimalScore>> matchExecutorByScoreMap = new LinkedHashMap<>(); protected BigDecimal hardScore = BigDecimal.ZERO; protected BigDecimal mediumScore = BigDecimal.ZERO; protected BigDecimal softScore = BigDecimal.ZERO; public HardMediumSoftBigDecimalScoreHolderImpl(boolean constraintMatchEnabled) 
{ super(constraintMatchEnabled, HardMediumSoftBigDecimalScore.ZERO); } public BigDecimal getHardScore() { return hardScore; } public BigDecimal getMediumScore() { return mediumScore; } public BigDecimal getSoftScore() { return softScore; } // ************************************************************************ // Setup methods // ************************************************************************ @Override public void configureConstraintWeight(Rule rule, HardMediumSoftBigDecimalScore constraintWeight) { super.configureConstraintWeight(rule, constraintWeight); BigDecimalMatchExecutor matchExecutor; if (constraintWeight.equals(HardMediumSoftBigDecimalScore.ZERO)) { matchExecutor = (RuleContext kcontext, BigDecimal matchWeight) -> { }; } else if (constraintWeight.getMediumScore().equals(BigDecimal.ZERO) && constraintWeight.getSoftScore().equals(BigDecimal.ZERO)) { matchExecutor = (RuleContext kcontext, BigDecimal matchWeight) -> addHardConstraintMatch(kcontext, constraintWeight.getHardScore().multiply(matchWeight)); } else if (constraintWeight.getHardScore().equals(BigDecimal.ZERO) && constraintWeight.getSoftScore().equals(BigDecimal.ZERO)) { matchExecutor = (RuleContext kcontext, BigDecimal matchWeight) -> addMediumConstraintMatch( kcontext, constraintWeight.getMediumScore().multiply(matchWeight)); } else if (constraintWeight.getHardScore().equals(BigDecimal.ZERO) && constraintWeight.getMediumScore().equals(BigDecimal.ZERO)) { matchExecutor = (RuleContext kcontext, BigDecimal matchWeight) -> addSoftConstraintMatch(kcontext, constraintWeight.getSoftScore().multiply(matchWeight)); } else { matchExecutor = (RuleContext kcontext, BigDecimal matchWeight) -> addMultiConstraintMatch( kcontext, constraintWeight.getHardScore().multiply(matchWeight), constraintWeight.getMediumScore().multiply(matchWeight), constraintWeight.getSoftScore().multiply(matchWeight)); } matchExecutorByNumberMap.put(rule, matchExecutor); matchExecutorByScoreMap.put(rule, (RuleContext 
kcontext, HardMediumSoftBigDecimalScore weightMultiplier) -> addMultiConstraintMatch(kcontext, constraintWeight.getHardScore().multiply(weightMultiplier.getHardScore()), constraintWeight.getMediumScore().multiply(weightMultiplier.getMediumScore()), constraintWeight.getSoftScore().multiply(weightMultiplier.getSoftScore()))); } // ************************************************************************ // Penalize and reward methods // ************************************************************************ @Override public void penalize(RuleContext kcontext) { impactScore(kcontext, BigDecimal.ONE.negate()); } @Override public void penalize(RuleContext kcontext, BigDecimal weightMultiplier) { impactScore(kcontext, weightMultiplier.negate()); } @Override public void penalize(RuleContext kcontext, BigDecimal hardWeightMultiplier, BigDecimal mediumWeightMultiplier, BigDecimal softWeightMultiplier) { impactScore(kcontext, hardWeightMultiplier.negate(), mediumWeightMultiplier.negate(), softWeightMultiplier.negate()); } @Override public void reward(RuleContext kcontext) { impactScore(kcontext, BigDecimal.ONE); } @Override public void reward(RuleContext kcontext, BigDecimal weightMultiplier) { impactScore(kcontext, weightMultiplier); } @Override public void reward(RuleContext kcontext, BigDecimal hardWeightMultiplier, BigDecimal mediumWeightMultiplier, BigDecimal softWeightMultiplier) { impactScore(kcontext, hardWeightMultiplier, mediumWeightMultiplier, softWeightMultiplier); } @Override public void impactScore(RuleContext kcontext) { impactScore(kcontext, BigDecimal.ONE); } @Override public void impactScore(RuleContext kcontext, int weightMultiplier) { impactScore(kcontext, BigDecimal.valueOf(weightMultiplier)); } @Override public void impactScore(RuleContext kcontext, long weightMultiplier) { impactScore(kcontext, BigDecimal.valueOf(weightMultiplier)); } @Override public void impactScore(RuleContext kcontext, BigDecimal weightMultiplier) { Rule rule = kcontext.getRule(); 
BigDecimalMatchExecutor matchExecutor = matchExecutorByNumberMap.get(rule); if (matchExecutor == null) { throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName() + ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @" + ConstraintConfiguration.class.getSimpleName() + " annotated class."); } matchExecutor.accept(kcontext, weightMultiplier); } private void impactScore(RuleContext kcontext, BigDecimal hardWeightMultiplier, BigDecimal mediumWeightMultiplier, BigDecimal softWeightMultiplier) { Rule rule = kcontext.getRule(); ScoreMatchExecutor<HardMediumSoftBigDecimalScore> matchExecutor = matchExecutorByScoreMap.get(rule); if (matchExecutor == null) { throw new IllegalStateException("The DRL rule (" + rule.getPackageName() + ":" + rule.getName() + ") does not match a @" + ConstraintWeight.class.getSimpleName() + " on the @" + ConstraintConfiguration.class.getSimpleName() + " annotated class."); } matchExecutor.accept(kcontext, HardMediumSoftBigDecimalScore.of(hardWeightMultiplier, mediumWeightMultiplier, softWeightMultiplier)); } // ************************************************************************ // Other match methods // ************************************************************************ @Override public void addHardConstraintMatch(RuleContext kcontext, BigDecimal hardWeight) { hardScore = hardScore.add(hardWeight); registerConstraintMatch(kcontext, () -> hardScore = hardScore.subtract(hardWeight), () -> HardMediumSoftBigDecimalScore.ofHard(hardWeight)); } @Override public void addMediumConstraintMatch(RuleContext kcontext, BigDecimal mediumWeight) { mediumScore = mediumScore.add(mediumWeight); registerConstraintMatch(kcontext, () -> mediumScore = mediumScore.subtract(mediumWeight), () -> HardMediumSoftBigDecimalScore.ofMedium(mediumWeight)); } @Override public void addSoftConstraintMatch(RuleContext kcontext, BigDecimal softWeight) { softScore = softScore.add(softWeight); 
registerConstraintMatch(kcontext, () -> softScore = softScore.subtract(softWeight), () -> HardMediumSoftBigDecimalScore.ofSoft(softWeight)); } @Override public void addMultiConstraintMatch(RuleContext kcontext, BigDecimal hardWeight, BigDecimal mediumWeight, BigDecimal softWeight) { hardScore = hardScore.add(hardWeight); mediumScore = mediumScore.add(mediumWeight); softScore = softScore.add(softWeight); registerConstraintMatch(kcontext, () -> { hardScore = hardScore.subtract(hardWeight); mediumScore = mediumScore.subtract(mediumWeight); softScore = softScore.subtract(softWeight); }, () -> HardMediumSoftBigDecimalScore.of(hardWeight, mediumWeight, softWeight)); } @Override public HardMediumSoftBigDecimalScore extractScore(int initScore) { return HardMediumSoftBigDecimalScore.ofUninitialized(initScore, hardScore, mediumScore, softScore); } }
tkobayas/optaplanner
optaplanner-core/src/main/java/org/optaplanner/core/impl/score/buildin/hardmediumsoftbigdecimal/HardMediumSoftBigDecimalScoreHolderImpl.java
Java
apache-2.0
10,319
package com.show.tt.imservice.manager; import com.google.protobuf.CodedInputStream; import com.show.tt.config.DBConstant; import com.show.tt.DB.DBInterface; import com.show.tt.DB.entity.GroupEntity; import com.show.tt.DB.entity.SessionEntity; import com.show.tt.DB.entity.UserEntity; import com.show.tt.imservice.callback.Packetlistener; import com.show.tt.imservice.event.GroupEvent; import com.show.tt.imservice.event.SessionEvent; import com.show.tt.protobuf.helper.EntityChangeEngine; import com.show.tt.protobuf.helper.ProtoBuf2JavaBean; import com.show.tt.protobuf.IMBaseDefine; import com.show.tt.protobuf.IMGroup; import com.show.tt.utils.IMUIHelper; import com.show.tt.utils.Logger; import com.show.tt.utils.pinyin.PinYin; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import de.greenrobot.event.EventBus; public class IMGroupManager extends IMManager { private Logger logger = Logger.getLogger(IMGroupManager.class); private static IMGroupManager inst = new IMGroupManager(); public static IMGroupManager instance() { return inst; } // 依赖的服务管理 private IMSocketManager imSocketManager = IMSocketManager.instance(); private IMLoginManager imLoginManager=IMLoginManager.instance(); private DBInterface dbInterface = DBInterface.instance(); // todo Pinyin的处理 //正式群,临时群都会有的,存在竞争 如果不同时请求的话 private Map<Integer,GroupEntity> groupMap = new ConcurrentHashMap<>(); // 群组状态 private boolean isGroupReady = false; @Override public void doOnStart() { groupMap.clear(); } public void onNormalLoginOk(){ onLocalLoginOk(); onLocalNetOk(); } /** * 1. 加载本地信息 * 2. 请求正规群信息 , 与本地进行对比 * 3. 
version groupId 请求 * */ public void onLocalLoginOk(){ logger.i("group#loadFromDb"); if(!EventBus.getDefault().isRegistered(inst)){ EventBus.getDefault().registerSticky(inst); } // 加载本地group List<GroupEntity> localGroupInfoList = dbInterface.loadAllGroup(); for(GroupEntity groupInfo:localGroupInfoList){ groupMap.put(groupInfo.getPeerId(),groupInfo); } triggerEvent(new GroupEvent(GroupEvent.Event.GROUP_INFO_OK)); } public void onLocalNetOk(){ reqGetNormalGroupList(); } @Override public void reset() { isGroupReady =false; groupMap.clear(); EventBus.getDefault().unregister(inst); } public void onEvent(SessionEvent event){ switch (event){ case RECENT_SESSION_LIST_UPDATE: // groupMap 本地已经加载完毕之后才触发 loadSessionGroupInfo(); break; } } /** * 实现自身的事件驱动 * @param event */ public synchronized void triggerEvent(GroupEvent event) { switch (event.getEvent()){ case GROUP_INFO_OK: isGroupReady = true; break; case GROUP_INFO_UPDATED: isGroupReady = true; break; } EventBus.getDefault().postSticky(event); } /**---------------事件驱动end------------------------------*/ /** * 1. 加载本地信息 * 2. 从session中获取 群组信息,从本地中获取这些群组的version信息 * 3. 
合并上述的merge结果, version groupId 请求 * */ private void loadSessionGroupInfo(){ logger.i("group#loadSessionGroupInfo"); List<SessionEntity> sessionInfoList = IMSessionManager.instance().getRecentSessionList(); List<IMBaseDefine.GroupVersionInfo> needReqList = new ArrayList<>(); for(SessionEntity sessionInfo:sessionInfoList){ int version = 0; if(sessionInfo.getPeerType() == DBConstant.SESSION_TYPE_GROUP /**群组*/){ if(groupMap.containsKey(sessionInfo.getPeerId())){ version = groupMap.get(sessionInfo.getPeerId()).getVersion(); } IMBaseDefine.GroupVersionInfo versionInfo = IMBaseDefine.GroupVersionInfo.newBuilder() .setVersion(version) .setGroupId(sessionInfo.getPeerId()) .build(); needReqList.add(versionInfo); } } // 事件触发的时候需要注意 if(needReqList.size() >0){ reqGetGroupDetailInfo(needReqList); return ; } } /** * 联系人页面正式群的请求 * todo 正式群与临时群逻辑上的分开的,但是底层应该是想通的 */ private void reqGetNormalGroupList() { logger.i("group#reqGetNormalGroupList"); int loginId = imLoginManager.getLoginId(); IMGroup.IMNormalGroupListReq normalGroupListReq = IMGroup.IMNormalGroupListReq.newBuilder() .setUserId(loginId) .build(); int sid = IMBaseDefine.ServiceID.SID_GROUP_VALUE; int cid = IMBaseDefine.GroupCmdID.CID_GROUP_NORMAL_LIST_REQUEST_VALUE; imSocketManager.sendRequest(normalGroupListReq,sid,cid); logger.i("group#send packet to server"); } public void onRepNormalGroupList(IMGroup.IMNormalGroupListRsp normalGroupListRsp) { logger.i("group#onRepNormalGroupList"); int groupSize = normalGroupListRsp.getGroupVersionListCount(); logger.i("group#onRepNormalGroupList cnt:%d",groupSize); List<IMBaseDefine.GroupVersionInfo> versionInfoList = normalGroupListRsp.getGroupVersionListList(); /**对比DB中的version字段*/ // 这块对比的可以抽离出来 List<IMBaseDefine.GroupVersionInfo> needInfoList = new ArrayList<>(); for(IMBaseDefine.GroupVersionInfo groupVersionInfo:versionInfoList ){ int groupId = groupVersionInfo.getGroupId(); int version = groupVersionInfo.getVersion(); if(groupMap.containsKey(groupId) && 
groupMap.get(groupId).getVersion() ==version ){ continue; } IMBaseDefine.GroupVersionInfo versionInfo = IMBaseDefine.GroupVersionInfo.newBuilder() .setVersion(0) .setGroupId(groupId) .build(); needInfoList.add(versionInfo); } // 事件触发的时候需要注意 todo if(needInfoList.size() >0){ reqGetGroupDetailInfo(needInfoList); } } public void reqGroupDetailInfo(int groupId){ IMBaseDefine.GroupVersionInfo groupVersionInfo = IMBaseDefine.GroupVersionInfo.newBuilder() .setGroupId(groupId) .setVersion(0) .build(); ArrayList<IMBaseDefine.GroupVersionInfo> list = new ArrayList<>(); list.add(groupVersionInfo); reqGetGroupDetailInfo(list); } /** * 请求群组的详细信息 */ public void reqGetGroupDetailInfo(List<IMBaseDefine.GroupVersionInfo> versionInfoList){ logger.i("group#reqGetGroupDetailInfo"); if(versionInfoList == null || versionInfoList.size()<=0){ logger.e("group#reqGetGroupDetailInfo# please check your params,cause by empty/null"); return ; } int loginId = imLoginManager.getLoginId(); IMGroup.IMGroupInfoListReq groupInfoListReq = IMGroup.IMGroupInfoListReq.newBuilder() .setUserId(loginId) .addAllGroupVersionList(versionInfoList) .build(); int sid = IMBaseDefine.ServiceID.SID_GROUP_VALUE; int cid = IMBaseDefine.GroupCmdID.CID_GROUP_INFO_REQUEST_VALUE; imSocketManager.sendRequest(groupInfoListReq,sid,cid); } public void onRepGroupDetailInfo(IMGroup.IMGroupInfoListRsp groupInfoListRsp){ logger.i("group#onRepGroupDetailInfo"); int groupSize = groupInfoListRsp.getGroupInfoListCount(); int userId = groupInfoListRsp.getUserId(); int loginId = imLoginManager.getLoginId(); logger.i("group#onRepGroupDetailInfo cnt:%d",groupSize); if(groupSize <=0 || userId!=loginId){ logger.i("group#onRepGroupDetailInfo size empty or userid[%d]≠ loginId[%d]",userId,loginId); return; } ArrayList<GroupEntity> needDb = new ArrayList<>(); for(IMBaseDefine.GroupInfo groupInfo:groupInfoListRsp.getGroupInfoListList()){ // 群组的详细信息 // 保存在DB中 // GroupManager 中的变量 GroupEntity groupEntity = 
ProtoBuf2JavaBean.getGroupEntity(groupInfo); groupMap.put(groupEntity.getPeerId(),groupEntity); needDb.add(groupEntity); } dbInterface.batchInsertOrUpdateGroup(needDb); triggerEvent(new GroupEvent(GroupEvent.Event.GROUP_INFO_UPDATED)); } /** * 创建群 * 默认是创建临时群,且客户端只能创建临时群 */ public void reqCreateTempGroup(String groupName, Set<Integer> memberList){ logger.i("group#reqCreateTempGroup, tempGroupName = %s", groupName); int loginId = imLoginManager.getLoginId(); IMGroup.IMGroupCreateReq groupCreateReq = IMGroup.IMGroupCreateReq.newBuilder() .setUserId(loginId) .setGroupType(IMBaseDefine.GroupType.GROUP_TYPE_TMP) .setGroupName(groupName) .setGroupAvatar("")// todo 群头像 现在是四宫格 .addAllMemberIdList(memberList) .build(); int sid = IMBaseDefine.ServiceID.SID_GROUP_VALUE; int cid = IMBaseDefine.GroupCmdID.CID_GROUP_CREATE_REQUEST_VALUE; imSocketManager.sendRequest(groupCreateReq, sid, cid,new Packetlistener() { @Override public void onSuccess(Object response) { try { IMGroup.IMGroupCreateRsp groupCreateRsp = IMGroup.IMGroupCreateRsp.parseFrom((CodedInputStream)response); IMGroupManager.instance().onReqCreateTempGroup(groupCreateRsp); } catch (IOException e) { logger.e("reqCreateTempGroup parse error"); triggerEvent(new GroupEvent(GroupEvent.Event.CREATE_GROUP_FAIL)); } } @Override public void onFaild() { triggerEvent(new GroupEvent(GroupEvent.Event.CREATE_GROUP_FAIL)); } @Override public void onTimeout() { triggerEvent(new GroupEvent(GroupEvent.Event.CREATE_GROUP_TIMEOUT)); } }); } public void onReqCreateTempGroup(IMGroup.IMGroupCreateRsp groupCreateRsp){ logger.d("group#onReqCreateTempGroup"); int resultCode = groupCreateRsp.getResultCode(); if(0 != resultCode){ logger.e("group#createGroup failed"); triggerEvent(new GroupEvent(GroupEvent.Event.CREATE_GROUP_FAIL)); return; } GroupEntity groupEntity = ProtoBuf2JavaBean.getGroupEntity(groupCreateRsp); // 更新DB 更新map groupMap.put(groupEntity.getPeerId(),groupEntity); IMSessionManager.instance().updateSession(groupEntity); 
dbInterface.insertOrUpdateGroup(groupEntity); triggerEvent(new GroupEvent(GroupEvent.Event.CREATE_GROUP_OK, groupEntity)); // 接收到之后修改UI } /** * 删除群成员 * REMOVE_CHANGE_MEMBER_TYPE * 可能会触发头像的修改 */ public void reqRemoveGroupMember(int groupId,Set<Integer> removeMemberlist){ reqChangeGroupMember(groupId,IMBaseDefine.GroupModifyType.GROUP_MODIFY_TYPE_DEL, removeMemberlist); } /** * 新增群成员 * ADD_CHANGE_MEMBER_TYPE * 可能会触发头像的修改 */ public void reqAddGroupMember(int groupId,Set<Integer> addMemberlist){ reqChangeGroupMember(groupId,IMBaseDefine.GroupModifyType.GROUP_MODIFY_TYPE_ADD, addMemberlist); } private void reqChangeGroupMember(int groupId,IMBaseDefine.GroupModifyType groupModifyType, Set<Integer> changeMemberlist) { logger.i("group#reqChangeGroupMember, changeGroupMemberType = %s", groupModifyType.toString()); final int loginId = imLoginManager.getLoginId(); IMGroup.IMGroupChangeMemberReq groupChangeMemberReq = IMGroup.IMGroupChangeMemberReq.newBuilder() .setUserId(loginId) .setChangeType(groupModifyType) .addAllMemberIdList(changeMemberlist) .setGroupId(groupId) .build(); int sid = IMBaseDefine.ServiceID.SID_GROUP_VALUE; int cid = IMBaseDefine.GroupCmdID.CID_GROUP_CHANGE_MEMBER_REQUEST_VALUE; imSocketManager.sendRequest(groupChangeMemberReq, sid, cid,new Packetlistener() { @Override public void onSuccess(Object response) { try { IMGroup.IMGroupChangeMemberRsp groupChangeMemberRsp = IMGroup.IMGroupChangeMemberRsp.parseFrom((CodedInputStream)response); IMGroupManager.instance().onReqChangeGroupMember(groupChangeMemberRsp); } catch (IOException e) { logger.e("reqChangeGroupMember parse error!"); triggerEvent(new GroupEvent(GroupEvent.Event.CHANGE_GROUP_MEMBER_FAIL)); } } @Override public void onFaild() { triggerEvent(new GroupEvent(GroupEvent.Event.CHANGE_GROUP_MEMBER_FAIL)); } @Override public void onTimeout() { triggerEvent(new GroupEvent(GroupEvent.Event.CHANGE_GROUP_MEMBER_TIMEOUT)); } }); } public void onReqChangeGroupMember(IMGroup.IMGroupChangeMemberRsp 
groupChangeMemberRsp){ int resultCode = groupChangeMemberRsp.getResultCode(); if (0 != resultCode){ triggerEvent(new GroupEvent(GroupEvent.Event.CHANGE_GROUP_MEMBER_FAIL)); return; } int groupId = groupChangeMemberRsp.getGroupId(); List<Integer> changeUserIdList = groupChangeMemberRsp.getChgUserIdListList(); IMBaseDefine.GroupModifyType groupModifyType = groupChangeMemberRsp.getChangeType(); GroupEntity groupEntityRet = groupMap.get(groupId); groupEntityRet.setlistGroupMemberIds(groupChangeMemberRsp.getCurUserIdListList()); groupMap.put(groupId,groupEntityRet); dbInterface.insertOrUpdateGroup(groupEntityRet); GroupEvent groupEvent = new GroupEvent(GroupEvent.Event.CHANGE_GROUP_MEMBER_SUCCESS); groupEvent.setChangeList(changeUserIdList); groupEvent.setChangeType(ProtoBuf2JavaBean.getGroupChangeType(groupModifyType)); groupEvent.setGroupEntity(groupEntityRet); triggerEvent(groupEvent); } /** * 屏蔽群消息 * IMGroupShieldReq * 备注:应为屏蔽之后大部分操作依旧需要客户端做 * */ public void reqShieldGroup(final int groupId,final int shieldType){ final GroupEntity entity = groupMap.get(groupId); if(entity == null){ logger.i("GroupEntity do not exist!"); return; } final int loginId = IMLoginManager.instance().getLoginId(); IMGroup.IMGroupShieldReq shieldReq = IMGroup.IMGroupShieldReq.newBuilder() .setShieldStatus(shieldType) .setGroupId(groupId) .setUserId(loginId) .build(); int sid = IMBaseDefine.ServiceID.SID_GROUP_VALUE; int cid = IMBaseDefine.GroupCmdID.CID_GROUP_SHIELD_GROUP_REQUEST_VALUE; imSocketManager.sendRequest(shieldReq,sid,cid,new Packetlistener() { @Override public void onSuccess(Object response) { try { IMGroup.IMGroupShieldRsp groupShieldRsp = IMGroup.IMGroupShieldRsp.parseFrom((CodedInputStream)response); int resCode = groupShieldRsp.getResultCode(); if(resCode !=0){ triggerEvent(new GroupEvent(GroupEvent.Event.SHIELD_GROUP_FAIL)); return; } if(groupShieldRsp.getGroupId() != groupId || groupShieldRsp.getUserId()!=loginId){ return; } // 更新DB状态 entity.setStatus(shieldType); 
dbInterface.insertOrUpdateGroup(entity); // 更改未读计数状态 boolean isFor = shieldType == DBConstant.GROUP_STATUS_SHIELD; IMUnreadMsgManager.instance().setForbidden( EntityChangeEngine.getSessionKey(groupId,DBConstant.SESSION_TYPE_GROUP),isFor); triggerEvent(new GroupEvent(GroupEvent.Event.SHIELD_GROUP_OK,entity)); } catch (IOException e) { logger.e("reqChangeGroupMember parse error!"); triggerEvent(new GroupEvent(GroupEvent.Event.SHIELD_GROUP_FAIL)); } } @Override public void onFaild() { triggerEvent(new GroupEvent(GroupEvent.Event.SHIELD_GROUP_FAIL)); } @Override public void onTimeout() { triggerEvent(new GroupEvent(GroupEvent.Event.SHIELD_GROUP_TIMEOUT)); } }); } /** * 收到群成员发生变更消息 * 服务端主动发出 * DB */ public void receiveGroupChangeMemberNotify(IMGroup.IMGroupChangeMemberNotify notify){ int groupId = notify.getGroupId(); int changeType = ProtoBuf2JavaBean.getGroupChangeType(notify.getChangeType()); List<Integer> changeList = notify.getChgUserIdListList(); List<Integer> curMemberList = notify.getCurUserIdListList(); if(groupMap.containsKey(groupId)){ GroupEntity entity = groupMap.get(groupId); entity.setlistGroupMemberIds(curMemberList); dbInterface.insertOrUpdateGroup(entity); groupMap.put(groupId,entity); GroupEvent groupEvent = new GroupEvent(GroupEvent.Event.CHANGE_GROUP_MEMBER_SUCCESS); groupEvent.setChangeList(changeList); groupEvent.setChangeType(changeType); groupEvent.setGroupEntity(entity); triggerEvent(groupEvent); }else{ //todo 没有就暂时不管了,只要聊过天都会显示在回话里面 } } public List<GroupEntity> getNormalGroupList() { List<GroupEntity> normalGroupList = new ArrayList<>(); for (Entry<Integer, GroupEntity> entry : groupMap.entrySet()) { GroupEntity group = entry.getValue(); if (group == null) { continue; } if (group.getGroupType() == DBConstant.GROUP_TYPE_NORMAL) { normalGroupList.add(group); } } return normalGroupList; } // 该方法只有正式群 // todo eric efficiency public List<GroupEntity> getNormalGroupSortedList() { List<GroupEntity> groupList = getNormalGroupList(); 
Collections.sort(groupList, new Comparator<GroupEntity>(){ @Override public int compare(GroupEntity entity1, GroupEntity entity2) { if(entity1.getPinyinElement().pinyin==null) { PinYin.getPinYin(entity1.getMainName(), entity1.getPinyinElement()); } if(entity2.getPinyinElement().pinyin==null) { PinYin.getPinYin(entity2.getMainName(),entity2.getPinyinElement()); } return entity1.getPinyinElement().pinyin.compareToIgnoreCase(entity2.getPinyinElement().pinyin); } }); return groupList; } public GroupEntity findGroup(int groupId) { logger.d("group#findGroup groupId:%s", groupId); if(groupMap.containsKey(groupId)){ return groupMap.get(groupId); } return null; } public List<GroupEntity> getSearchAllGroupList(String key){ List<GroupEntity> searchList = new ArrayList<>(); for(Map.Entry<Integer,GroupEntity> entry:groupMap.entrySet()){ GroupEntity groupEntity = entry.getValue(); if (IMUIHelper.handleGroupSearch(key, groupEntity)) { searchList.add(groupEntity); } } return searchList; } public List<UserEntity> getGroupMembers(int groupId) { logger.d("group#getGroupMembers groupId:%s", groupId); GroupEntity group = findGroup(groupId); if (group == null) { logger.e("group#no such group id:%s", groupId); return null; } Set<Integer> userList = group.getlistGroupMemberIds(); ArrayList<UserEntity> memberList = new ArrayList<UserEntity>(); for (Integer id : userList) { UserEntity contact = IMContactManager.instance().findContact(id); if (contact == null) { logger.e("group#no such contact id:%s", id); continue; } memberList.add(contact); } return memberList; } /**------set/get 的定义*/ public Map<Integer, GroupEntity> getGroupMap() { return groupMap; } public boolean isGroupReady() { return isGroupReady; } }
hgl888/TeamTalk
android/app/src/main/java/com/show/tt/imservice/manager/IMGroupManager.java
Java
apache-2.0
22,289
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: object_detection/protos/argmax_matcher.proto #define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION #include "object_detection/protos/argmax_matcher.pb.h" #include <algorithm> #include <google/protobuf/stubs/common.h> #include <google/protobuf/stubs/port.h> #include <google/protobuf/stubs/once.h> #include <google/protobuf/io/coded_stream.h> #include <google/protobuf/wire_format_lite_inl.h> #include <google/protobuf/descriptor.h> #include <google/protobuf/generated_message_reflection.h> #include <google/protobuf/reflection_ops.h> #include <google/protobuf/wire_format.h> // @@protoc_insertion_point(includes) namespace object_detection { namespace protos { class ArgMaxMatcherDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<ArgMaxMatcher> _instance; } _ArgMaxMatcher_default_instance_; namespace protobuf_object_5fdetection_2fprotos_2fargmax_5fmatcher_2eproto { namespace { ::google::protobuf::Metadata file_level_metadata[1]; } // namespace PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTableField const TableStruct::entries[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { {0, 0, 0, ::google::protobuf::internal::kInvalidMask, 0, 0}, }; PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::AuxillaryParseTableField const TableStruct::aux[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { ::google::protobuf::internal::AuxillaryParseTableField(), }; PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTable const TableStruct::schema[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { { NULL, NULL, 0, -1, -1, -1, -1, NULL, false }, }; const ::google::protobuf::uint32 TableStruct::offsets[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ArgMaxMatcher, _has_bits_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ArgMaxMatcher, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ 
~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ArgMaxMatcher, matched_threshold_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ArgMaxMatcher, unmatched_threshold_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ArgMaxMatcher, ignore_thresholds_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ArgMaxMatcher, negatives_lower_than_unmatched_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ArgMaxMatcher, force_match_for_each_row_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ArgMaxMatcher, use_matmul_gather_), 4, 5, 0, 3, 1, 2, }; static const ::google::protobuf::internal::MigrationSchema schemas[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { { 0, 11, sizeof(ArgMaxMatcher)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { reinterpret_cast<const ::google::protobuf::Message*>(&_ArgMaxMatcher_default_instance_), }; namespace { void protobuf_AssignDescriptors() { AddDescriptors(); ::google::protobuf::MessageFactory* factory = NULL; AssignDescriptors( "object_detection/protos/argmax_matcher.proto", schemas, file_default_instances, TableStruct::offsets, factory, file_level_metadata, NULL, NULL); } void protobuf_AssignDescriptorsOnce() { static GOOGLE_PROTOBUF_DECLARE_ONCE(once); ::google::protobuf::GoogleOnceInit(&once, &protobuf_AssignDescriptors); } void protobuf_RegisterTypes(const ::std::string&) GOOGLE_ATTRIBUTE_COLD; void protobuf_RegisterTypes(const ::std::string&) { protobuf_AssignDescriptorsOnce(); ::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 1); } } // namespace void TableStruct::InitDefaultsImpl() { GOOGLE_PROTOBUF_VERIFY_VERSION; ::google::protobuf::internal::InitProtobufDefaults(); _ArgMaxMatcher_default_instance_._instance.DefaultConstruct(); ::google::protobuf::internal::OnShutdownDestroyMessage( &_ArgMaxMatcher_default_instance_);} void InitDefaults() { static GOOGLE_PROTOBUF_DECLARE_ONCE(once); ::google::protobuf::GoogleOnceInit(&once, 
&TableStruct::InitDefaultsImpl); } namespace { void AddDescriptorsImpl() { InitDefaults(); static const char descriptor[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { "\n,object_detection/protos/argmax_matcher" ".proto\022\027object_detection.protos\"\354\001\n\rArgM" "axMatcher\022\036\n\021matched_threshold\030\001 \001(\002:\0030." "5\022 \n\023unmatched_threshold\030\002 \001(\002:\0030.5\022 \n\021i" "gnore_thresholds\030\003 \001(\010:\005false\022,\n\036negativ" "es_lower_than_unmatched\030\004 \001(\010:\004true\022\'\n\030f" "orce_match_for_each_row\030\005 \001(\010:\005false\022 \n\021" "use_matmul_gather\030\006 \001(\010:\005false" }; ::google::protobuf::DescriptorPool::InternalAddGeneratedFile( descriptor, 310); ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile( "object_detection/protos/argmax_matcher.proto", &protobuf_RegisterTypes); } } // anonymous namespace void AddDescriptors() { static GOOGLE_PROTOBUF_DECLARE_ONCE(once); ::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl); } // Force AddDescriptors() to be called at dynamic initialization time. 
struct StaticDescriptorInitializer { StaticDescriptorInitializer() { AddDescriptors(); } } static_descriptor_initializer; } // namespace protobuf_object_5fdetection_2fprotos_2fargmax_5fmatcher_2eproto // =================================================================== #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int ArgMaxMatcher::kMatchedThresholdFieldNumber; const int ArgMaxMatcher::kUnmatchedThresholdFieldNumber; const int ArgMaxMatcher::kIgnoreThresholdsFieldNumber; const int ArgMaxMatcher::kNegativesLowerThanUnmatchedFieldNumber; const int ArgMaxMatcher::kForceMatchForEachRowFieldNumber; const int ArgMaxMatcher::kUseMatmulGatherFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ArgMaxMatcher::ArgMaxMatcher() : ::google::protobuf::Message(), _internal_metadata_(NULL) { if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) { protobuf_object_5fdetection_2fprotos_2fargmax_5fmatcher_2eproto::InitDefaults(); } SharedCtor(); // @@protoc_insertion_point(constructor:object_detection.protos.ArgMaxMatcher) } ArgMaxMatcher::ArgMaxMatcher(const ArgMaxMatcher& from) : ::google::protobuf::Message(), _internal_metadata_(NULL), _has_bits_(from._has_bits_), _cached_size_(0) { _internal_metadata_.MergeFrom(from._internal_metadata_); ::memcpy(&ignore_thresholds_, &from.ignore_thresholds_, static_cast<size_t>(reinterpret_cast<char*>(&unmatched_threshold_) - reinterpret_cast<char*>(&ignore_thresholds_)) + sizeof(unmatched_threshold_)); // @@protoc_insertion_point(copy_constructor:object_detection.protos.ArgMaxMatcher) } void ArgMaxMatcher::SharedCtor() { _cached_size_ = 0; ::memset(&ignore_thresholds_, 0, static_cast<size_t>( reinterpret_cast<char*>(&use_matmul_gather_) - reinterpret_cast<char*>(&ignore_thresholds_)) + sizeof(use_matmul_gather_)); negatives_lower_than_unmatched_ = true; matched_threshold_ = 0.5f; unmatched_threshold_ = 0.5f; } ArgMaxMatcher::~ArgMaxMatcher() { // @@protoc_insertion_point(destructor:object_detection.protos.ArgMaxMatcher) 
SharedDtor(); } void ArgMaxMatcher::SharedDtor() { } void ArgMaxMatcher::SetCachedSize(int size) const { GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); _cached_size_ = size; GOOGLE_SAFE_CONCURRENT_WRITES_END(); } const ::google::protobuf::Descriptor* ArgMaxMatcher::descriptor() { protobuf_object_5fdetection_2fprotos_2fargmax_5fmatcher_2eproto::protobuf_AssignDescriptorsOnce(); return protobuf_object_5fdetection_2fprotos_2fargmax_5fmatcher_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const ArgMaxMatcher& ArgMaxMatcher::default_instance() { protobuf_object_5fdetection_2fprotos_2fargmax_5fmatcher_2eproto::InitDefaults(); return *internal_default_instance(); } ArgMaxMatcher* ArgMaxMatcher::New(::google::protobuf::Arena* arena) const { ArgMaxMatcher* n = new ArgMaxMatcher; if (arena != NULL) { arena->Own(n); } return n; } void ArgMaxMatcher::Clear() { // @@protoc_insertion_point(message_clear_start:object_detection.protos.ArgMaxMatcher) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; cached_has_bits = _has_bits_[0]; if (cached_has_bits & 63u) { ::memset(&ignore_thresholds_, 0, static_cast<size_t>( reinterpret_cast<char*>(&use_matmul_gather_) - reinterpret_cast<char*>(&ignore_thresholds_)) + sizeof(use_matmul_gather_)); negatives_lower_than_unmatched_ = true; matched_threshold_ = 0.5f; unmatched_threshold_ = 0.5f; } _has_bits_.Clear(); _internal_metadata_.Clear(); } bool ArgMaxMatcher::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:object_detection.protos.ArgMaxMatcher) for (;;) { ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch 
(::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // optional float matched_threshold = 1 [default = 0.5]; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(13u /* 13 & 0xFF */)) { set_has_matched_threshold(); DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( input, &matched_threshold_))); } else { goto handle_unusual; } break; } // optional float unmatched_threshold = 2 [default = 0.5]; case 2: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(21u /* 21 & 0xFF */)) { set_has_unmatched_threshold(); DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( input, &unmatched_threshold_))); } else { goto handle_unusual; } break; } // optional bool ignore_thresholds = 3 [default = false]; case 3: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(24u /* 24 & 0xFF */)) { set_has_ignore_thresholds(); DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( input, &ignore_thresholds_))); } else { goto handle_unusual; } break; } // optional bool negatives_lower_than_unmatched = 4 [default = true]; case 4: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(32u /* 32 & 0xFF */)) { set_has_negatives_lower_than_unmatched(); DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( input, &negatives_lower_than_unmatched_))); } else { goto handle_unusual; } break; } // optional bool force_match_for_each_row = 5 [default = false]; case 5: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(40u /* 40 & 0xFF */)) { 
set_has_force_match_for_each_row(); DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( input, &force_match_for_each_row_))); } else { goto handle_unusual; } break; } // optional bool use_matmul_gather = 6 [default = false]; case 6: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(48u /* 48 & 0xFF */)) { set_has_use_matmul_gather(); DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( input, &use_matmul_gather_))); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:object_detection.protos.ArgMaxMatcher) return true; failure: // @@protoc_insertion_point(parse_failure:object_detection.protos.ArgMaxMatcher) return false; #undef DO_ } void ArgMaxMatcher::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:object_detection.protos.ArgMaxMatcher) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; cached_has_bits = _has_bits_[0]; // optional float matched_threshold = 1 [default = 0.5]; if (cached_has_bits & 0x00000010u) { ::google::protobuf::internal::WireFormatLite::WriteFloat(1, this->matched_threshold(), output); } // optional float unmatched_threshold = 2 [default = 0.5]; if (cached_has_bits & 0x00000020u) { ::google::protobuf::internal::WireFormatLite::WriteFloat(2, this->unmatched_threshold(), output); } // optional bool ignore_thresholds = 3 [default = false]; if (cached_has_bits & 0x00000001u) { ::google::protobuf::internal::WireFormatLite::WriteBool(3, this->ignore_thresholds(), output); } // optional bool negatives_lower_than_unmatched = 4 
[default = true]; if (cached_has_bits & 0x00000008u) { ::google::protobuf::internal::WireFormatLite::WriteBool(4, this->negatives_lower_than_unmatched(), output); } // optional bool force_match_for_each_row = 5 [default = false]; if (cached_has_bits & 0x00000002u) { ::google::protobuf::internal::WireFormatLite::WriteBool(5, this->force_match_for_each_row(), output); } // optional bool use_matmul_gather = 6 [default = false]; if (cached_has_bits & 0x00000004u) { ::google::protobuf::internal::WireFormatLite::WriteBool(6, this->use_matmul_gather(), output); } if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); } // @@protoc_insertion_point(serialize_end:object_detection.protos.ArgMaxMatcher) } ::google::protobuf::uint8* ArgMaxMatcher::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:object_detection.protos.ArgMaxMatcher) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; cached_has_bits = _has_bits_[0]; // optional float matched_threshold = 1 [default = 0.5]; if (cached_has_bits & 0x00000010u) { target = ::google::protobuf::internal::WireFormatLite::WriteFloatToArray(1, this->matched_threshold(), target); } // optional float unmatched_threshold = 2 [default = 0.5]; if (cached_has_bits & 0x00000020u) { target = ::google::protobuf::internal::WireFormatLite::WriteFloatToArray(2, this->unmatched_threshold(), target); } // optional bool ignore_thresholds = 3 [default = false]; if (cached_has_bits & 0x00000001u) { target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(3, this->ignore_thresholds(), target); } // optional bool negatives_lower_than_unmatched = 4 [default = true]; if (cached_has_bits & 0x00000008u) { target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(4, 
this->negatives_lower_than_unmatched(), target); } // optional bool force_match_for_each_row = 5 [default = false]; if (cached_has_bits & 0x00000002u) { target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(5, this->force_match_for_each_row(), target); } // optional bool use_matmul_gather = 6 [default = false]; if (cached_has_bits & 0x00000004u) { target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(6, this->use_matmul_gather(), target); } if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); } // @@protoc_insertion_point(serialize_to_array_end:object_detection.protos.ArgMaxMatcher) return target; } size_t ArgMaxMatcher::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:object_detection.protos.ArgMaxMatcher) size_t total_size = 0; if (_internal_metadata_.have_unknown_fields()) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( _internal_metadata_.unknown_fields()); } if (_has_bits_[0 / 32] & 63u) { // optional bool ignore_thresholds = 3 [default = false]; if (has_ignore_thresholds()) { total_size += 1 + 1; } // optional bool force_match_for_each_row = 5 [default = false]; if (has_force_match_for_each_row()) { total_size += 1 + 1; } // optional bool use_matmul_gather = 6 [default = false]; if (has_use_matmul_gather()) { total_size += 1 + 1; } // optional bool negatives_lower_than_unmatched = 4 [default = true]; if (has_negatives_lower_than_unmatched()) { total_size += 1 + 1; } // optional float matched_threshold = 1 [default = 0.5]; if (has_matched_threshold()) { total_size += 1 + 4; } // optional float unmatched_threshold = 2 [default = 0.5]; if (has_unmatched_threshold()) { total_size += 1 + 4; } } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); _cached_size_ = cached_size; 
GOOGLE_SAFE_CONCURRENT_WRITES_END(); return total_size; } void ArgMaxMatcher::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:object_detection.protos.ArgMaxMatcher) GOOGLE_DCHECK_NE(&from, this); const ArgMaxMatcher* source = ::google::protobuf::internal::DynamicCastToGenerated<const ArgMaxMatcher>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:object_detection.protos.ArgMaxMatcher) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:object_detection.protos.ArgMaxMatcher) MergeFrom(*source); } } void ArgMaxMatcher::MergeFrom(const ArgMaxMatcher& from) { // @@protoc_insertion_point(class_specific_merge_from_start:object_detection.protos.ArgMaxMatcher) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; cached_has_bits = from._has_bits_[0]; if (cached_has_bits & 63u) { if (cached_has_bits & 0x00000001u) { ignore_thresholds_ = from.ignore_thresholds_; } if (cached_has_bits & 0x00000002u) { force_match_for_each_row_ = from.force_match_for_each_row_; } if (cached_has_bits & 0x00000004u) { use_matmul_gather_ = from.use_matmul_gather_; } if (cached_has_bits & 0x00000008u) { negatives_lower_than_unmatched_ = from.negatives_lower_than_unmatched_; } if (cached_has_bits & 0x00000010u) { matched_threshold_ = from.matched_threshold_; } if (cached_has_bits & 0x00000020u) { unmatched_threshold_ = from.unmatched_threshold_; } _has_bits_[0] |= cached_has_bits; } } void ArgMaxMatcher::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:object_detection.protos.ArgMaxMatcher) if (&from == this) return; Clear(); MergeFrom(from); } void ArgMaxMatcher::CopyFrom(const ArgMaxMatcher& from) { // 
@@protoc_insertion_point(class_specific_copy_from_start:object_detection.protos.ArgMaxMatcher) if (&from == this) return; Clear(); MergeFrom(from); } bool ArgMaxMatcher::IsInitialized() const { return true; } void ArgMaxMatcher::Swap(ArgMaxMatcher* other) { if (other == this) return; InternalSwap(other); } void ArgMaxMatcher::InternalSwap(ArgMaxMatcher* other) { using std::swap; swap(ignore_thresholds_, other->ignore_thresholds_); swap(force_match_for_each_row_, other->force_match_for_each_row_); swap(use_matmul_gather_, other->use_matmul_gather_); swap(negatives_lower_than_unmatched_, other->negatives_lower_than_unmatched_); swap(matched_threshold_, other->matched_threshold_); swap(unmatched_threshold_, other->unmatched_threshold_); swap(_has_bits_[0], other->_has_bits_[0]); _internal_metadata_.Swap(&other->_internal_metadata_); swap(_cached_size_, other->_cached_size_); } ::google::protobuf::Metadata ArgMaxMatcher::GetMetadata() const { protobuf_object_5fdetection_2fprotos_2fargmax_5fmatcher_2eproto::protobuf_AssignDescriptorsOnce(); return protobuf_object_5fdetection_2fprotos_2fargmax_5fmatcher_2eproto::file_level_metadata[kIndexInFileMessages]; } #if PROTOBUF_INLINE_NOT_IN_HEADERS // ArgMaxMatcher // optional float matched_threshold = 1 [default = 0.5]; bool ArgMaxMatcher::has_matched_threshold() const { return (_has_bits_[0] & 0x00000010u) != 0; } void ArgMaxMatcher::set_has_matched_threshold() { _has_bits_[0] |= 0x00000010u; } void ArgMaxMatcher::clear_has_matched_threshold() { _has_bits_[0] &= ~0x00000010u; } void ArgMaxMatcher::clear_matched_threshold() { matched_threshold_ = 0.5f; clear_has_matched_threshold(); } float ArgMaxMatcher::matched_threshold() const { // @@protoc_insertion_point(field_get:object_detection.protos.ArgMaxMatcher.matched_threshold) return matched_threshold_; } void ArgMaxMatcher::set_matched_threshold(float value) { set_has_matched_threshold(); matched_threshold_ = value; // 
@@protoc_insertion_point(field_set:object_detection.protos.ArgMaxMatcher.matched_threshold) } // optional float unmatched_threshold = 2 [default = 0.5]; bool ArgMaxMatcher::has_unmatched_threshold() const { return (_has_bits_[0] & 0x00000020u) != 0; } void ArgMaxMatcher::set_has_unmatched_threshold() { _has_bits_[0] |= 0x00000020u; } void ArgMaxMatcher::clear_has_unmatched_threshold() { _has_bits_[0] &= ~0x00000020u; } void ArgMaxMatcher::clear_unmatched_threshold() { unmatched_threshold_ = 0.5f; clear_has_unmatched_threshold(); } float ArgMaxMatcher::unmatched_threshold() const { // @@protoc_insertion_point(field_get:object_detection.protos.ArgMaxMatcher.unmatched_threshold) return unmatched_threshold_; } void ArgMaxMatcher::set_unmatched_threshold(float value) { set_has_unmatched_threshold(); unmatched_threshold_ = value; // @@protoc_insertion_point(field_set:object_detection.protos.ArgMaxMatcher.unmatched_threshold) } // optional bool ignore_thresholds = 3 [default = false]; bool ArgMaxMatcher::has_ignore_thresholds() const { return (_has_bits_[0] & 0x00000001u) != 0; } void ArgMaxMatcher::set_has_ignore_thresholds() { _has_bits_[0] |= 0x00000001u; } void ArgMaxMatcher::clear_has_ignore_thresholds() { _has_bits_[0] &= ~0x00000001u; } void ArgMaxMatcher::clear_ignore_thresholds() { ignore_thresholds_ = false; clear_has_ignore_thresholds(); } bool ArgMaxMatcher::ignore_thresholds() const { // @@protoc_insertion_point(field_get:object_detection.protos.ArgMaxMatcher.ignore_thresholds) return ignore_thresholds_; } void ArgMaxMatcher::set_ignore_thresholds(bool value) { set_has_ignore_thresholds(); ignore_thresholds_ = value; // @@protoc_insertion_point(field_set:object_detection.protos.ArgMaxMatcher.ignore_thresholds) } // optional bool negatives_lower_than_unmatched = 4 [default = true]; bool ArgMaxMatcher::has_negatives_lower_than_unmatched() const { return (_has_bits_[0] & 0x00000008u) != 0; } void ArgMaxMatcher::set_has_negatives_lower_than_unmatched() { 
_has_bits_[0] |= 0x00000008u; } void ArgMaxMatcher::clear_has_negatives_lower_than_unmatched() { _has_bits_[0] &= ~0x00000008u; } void ArgMaxMatcher::clear_negatives_lower_than_unmatched() { negatives_lower_than_unmatched_ = true; clear_has_negatives_lower_than_unmatched(); } bool ArgMaxMatcher::negatives_lower_than_unmatched() const { // @@protoc_insertion_point(field_get:object_detection.protos.ArgMaxMatcher.negatives_lower_than_unmatched) return negatives_lower_than_unmatched_; } void ArgMaxMatcher::set_negatives_lower_than_unmatched(bool value) { set_has_negatives_lower_than_unmatched(); negatives_lower_than_unmatched_ = value; // @@protoc_insertion_point(field_set:object_detection.protos.ArgMaxMatcher.negatives_lower_than_unmatched) } // optional bool force_match_for_each_row = 5 [default = false]; bool ArgMaxMatcher::has_force_match_for_each_row() const { return (_has_bits_[0] & 0x00000002u) != 0; } void ArgMaxMatcher::set_has_force_match_for_each_row() { _has_bits_[0] |= 0x00000002u; } void ArgMaxMatcher::clear_has_force_match_for_each_row() { _has_bits_[0] &= ~0x00000002u; } void ArgMaxMatcher::clear_force_match_for_each_row() { force_match_for_each_row_ = false; clear_has_force_match_for_each_row(); } bool ArgMaxMatcher::force_match_for_each_row() const { // @@protoc_insertion_point(field_get:object_detection.protos.ArgMaxMatcher.force_match_for_each_row) return force_match_for_each_row_; } void ArgMaxMatcher::set_force_match_for_each_row(bool value) { set_has_force_match_for_each_row(); force_match_for_each_row_ = value; // @@protoc_insertion_point(field_set:object_detection.protos.ArgMaxMatcher.force_match_for_each_row) } // optional bool use_matmul_gather = 6 [default = false]; bool ArgMaxMatcher::has_use_matmul_gather() const { return (_has_bits_[0] & 0x00000004u) != 0; } void ArgMaxMatcher::set_has_use_matmul_gather() { _has_bits_[0] |= 0x00000004u; } void ArgMaxMatcher::clear_has_use_matmul_gather() { _has_bits_[0] &= ~0x00000004u; } void 
ArgMaxMatcher::clear_use_matmul_gather() { use_matmul_gather_ = false; clear_has_use_matmul_gather(); } bool ArgMaxMatcher::use_matmul_gather() const { // @@protoc_insertion_point(field_get:object_detection.protos.ArgMaxMatcher.use_matmul_gather) return use_matmul_gather_; } void ArgMaxMatcher::set_use_matmul_gather(bool value) { set_has_use_matmul_gather(); use_matmul_gather_ = value; // @@protoc_insertion_point(field_set:object_detection.protos.ArgMaxMatcher.use_matmul_gather) } #endif // PROTOBUF_INLINE_NOT_IN_HEADERS // @@protoc_insertion_point(namespace_scope) } // namespace protos } // namespace object_detection // @@protoc_insertion_point(global_scope)
mlperf/inference_results_v0.7
open/DellEMC/code/ssd-mobilenet/xilinx/include/object_detection/protos/argmax_matcher.pb.cc
C++
apache-2.0
27,446
/* Copyright 2020 The Matrix.org Foundation C.I.C. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /** * Room storage format: * { * id: "matrix|remote|link_key", // customisable * matrix_id: "room_id", * remote_id: "remote_room_id", * matrix: { serialised matrix room info }, * remote: { serialised remote room info }, * data: { ... any additional info ... } * } * * Each document can either represent a matrix room, a remote room, or * a mapping. They look like this: * MATRIX * { * id: "!room:id", * matrix_id: "!room:id", * matrix: { .. custom data eg name: "A happy place" .. } * } * * REMOTE (e.g. IRC) * { * id: "irc.freenode.net_#channame", * remote_id: "irc.freenode.net_#channame", * remote: { .. custom data e.g. is_pm_room: true .. } * } * * MAPPING * { * id: "!room:id__irc.freenode.net_#channame", // link key; customisable. * matrix_id: "!room:id", * remote_id: "irc.freenode.net_#channame", * matrix: { .. custom data .. }, * remote: { .. custom data .. }, * data: { .. custom data about the mapping ..} * } * * A unique, non-sparse index can be set on the 'id' key, and non-unique, * sparse indexes can be set on matrix_id and remote_id to make mappings * quicker to compute. 
* */ import Datastore from "nedb"; import { BridgeStore } from "./bridge-store"; import { MatrixRoom, MatrixRoomData } from "../models/rooms/matrix"; import { RemoteRoom } from "../models/rooms/remote"; export class RoomBridgeStore extends BridgeStore { public delimiter = " "; /** * Construct a store suitable for room bridging information. Data is stored * as {@link RoomBridgeStoreEntry}s which have the following * *serialized* format: * ``` * { * id: "unique_id", // customisable * matrix_id: "room_id", * remote_id: "remote_room_id", * matrix: { serialised matrix room info }, * remote: { serialised remote room info }, * data: { ... any additional info ... } * } * ``` * If a unique 'id' is not given, the store will generate one by concatenating * the `matrix_id` and the `remote_id`. The delimiter * used is a property on this store and can be modified. * * The structure of Entry objects means that it is efficient to select based * off the 'id', 'matrix_id' or 'remote_id'. Additional indexes can be added * manually. * @constructor * @param db The connected NEDB database instance * @param opts Options for this store. */ constructor(db: Datastore) { super(db); } /** * Insert an entry, clobbering based on the ID of the entry. * @param entry */ public upsertEntry(entry: RoomBridgeStoreEntry) { return this.upsert({ id: entry.id }, RoomBridgeStoreEntry.serializeEntry(entry) as Record<string, unknown>); } /** * Get an existing entry based on the provided entry ID. * @param id The ID of the entry to retrieve. */ public getEntryById(id: string) { return this.selectOne({ id: id }, this.convertTo((doc: RoomStoreEntryDoc) => new RoomBridgeStoreEntry(doc) )); } /** * Get a list of entries based on the matrix_id of each entry. * @param matrixId */ public getEntriesByMatrixId(matrixId: string) { return this.select({ matrix_id: matrixId }, this.convertTo((doc: RoomStoreEntryDoc) => new RoomBridgeStoreEntry(doc) )); } /** * A batch version of <code>getEntriesByMatrixId</code>. 
* @param ids * @return Resolves to a map of room_id => Entry[] */ public async getEntriesByMatrixIds(ids: string[]) { // eslint-disable-next-line camelcase const docs = await this.select<{ matrix_id: string }, RoomStoreEntryDoc>({ matrix_id: { $in: ids } }); if (!docs) { return {}; } const entries: {[matrixId: string]: RoomBridgeStoreEntry[]} = {}; docs.forEach((doc: RoomStoreEntryDoc) => { if (!doc.matrix_id) { return; } if (!entries[doc.matrix_id]) { entries[doc.matrix_id] = []; } entries[doc.matrix_id].push(new RoomBridgeStoreEntry(doc)); }); return entries; } /** * Get a list of entries based on the remote_id of each entry. * @param remoteId */ public getEntriesByRemoteId(remoteId: string) { return this.select({ remote_id: remoteId }, this.convertTo((doc: RoomStoreEntryDoc) => new RoomBridgeStoreEntry(doc) )); } /** * Create a link between a matrix room and remote room. This will create an entry with: * - The matrix_id set to the matrix room ID. * - The remote_id set to the remote room ID. * - The id set to the id value given OR a concatenation of the matrix and remote IDs * if one is not provided. * @param matrixRoom The matrix room * @param remoteRoom The remote room * @param data Information about this mapping. * @param linkId The id value to set. If not given, a unique ID will be * created from the matrix_id and remote_id. */ public linkRooms(matrixRoom: MatrixRoom, remoteRoom: RemoteRoom, data: Record<string, unknown>={}, linkId?: string) { linkId = linkId || RoomBridgeStore.createUniqueId( matrixRoom.getId(), remoteRoom.getId(), this.delimiter ); return this.upsert({ id: linkId }, { id: linkId, remote_id: remoteRoom.getId(), matrix_id: matrixRoom.getId(), remote: remoteRoom.serialize(), matrix: matrixRoom.serialize(), data: data }); } /** * Create an entry with only a matrix room. Sets the 'id' of the entry to the * Matrix room ID. If an entry already exists with this 'id', it will be replaced. 
* This function is useful if you just want to store a room with some data and not * worry about any mappings. * @param matrixRoom * @see RoomBridgeStore#getMatrixRoom */ public setMatrixRoom(matrixRoom: MatrixRoom) { const entry = new RoomBridgeStoreEntry({ id: matrixRoom.getId(), matrix_id: matrixRoom.getId(), matrix: matrixRoom.serialize(), }); return this.upsertEntry(entry); } /** * Get an entry's Matrix room based on the provided room_id. The entry MUST have * an 'id' of the room_id and there MUST be a Matrix room contained within the * entry for this to return. * @param roomId * @see RoomBridgeStore#setMatrixRoom */ public getMatrixRoom(roomId: string) { return this.getEntryById(roomId).then(function(e) { return e ? e.matrix : null; }); } /** * Get all entries with the given remote_id which have a Matrix room within. * @param remoteId */ public async getLinkedMatrixRooms(remoteId: string) { const entries = await this.getEntriesByRemoteId(remoteId); if (!entries) { return []; } return entries.filter(function(e) { return Boolean(e.matrix); }).map(function(e) { return e.matrix; }) as MatrixRoom[]; } /** * Get all entries with the given matrix_id which have a Remote room within. * @param matrixId */ public async getLinkedRemoteRooms(matrixId: string) { const entries = await this.getEntriesByMatrixId(matrixId); if (!entries) { return []; } return entries.filter(function(e) { return Boolean(e.remote); }).map(function(e) { return e.remote; }) as RemoteRoom[]; } /** * A batched version of `getLinkedRemoteRooms`. * @param matrixIds * @return A mapping of room_id to RemoteRoom. 
* @see RoomBridgeStore#getLinkedRemoteRooms */ public async batchGetLinkedRemoteRooms(matrixIds: string[]) { const entryMap = await this.getEntriesByMatrixIds(matrixIds); const result: {[roomId: string]: RemoteRoom[]} = {}; for (const [key, obj] of Object.entries(entryMap)) { result[key] = obj.filter((e) => { return Boolean(e.remote); }).map((e) => { return e.remote; }) as RemoteRoom[]; } return result; } /** * Get a list of entries based on a RemoteRoom data value. * @param data The data values to retrieve based from. * @example * remoteRoom.set("some_key", "some_val"); * // store remoteRoom and then: * store.getEntriesByRemoteRoomData({ * some_key: "some_val" * }); */ public getEntriesByRemoteRoomData(data: Record<string, unknown>) { Object.keys(data).forEach(function(k) { const query = data[k]; delete data[k]; data["remote." + k] = query; }); return this.select(data, this.convertTo((doc: RoomStoreEntryDoc) => new RoomBridgeStoreEntry(doc) )); } /** * Get a list of entries based on a MatrixRoom data value. * @param data The data values to retrieve based from. * @example * matrixRoom.set("some_key", "some_val"); * // store matrixRoom and then: * store.getEntriesByMatrixRoomData({ * some_key: "some_val" * }); */ public getEntriesByMatrixRoomData(data: Record<string, unknown>) { Object.keys(data).forEach(function(k) { const query = data[k]; delete data[k]; data["matrix.extras." + k] = query; }); return this.select(data, this.convertTo((doc: RoomStoreEntryDoc) => new RoomBridgeStoreEntry(doc) )); } /** * Get a list of entries based on the link's data value. * @param data The data values to retrieve based from. * @example * store.linkRooms(matrixRoom, remoteRoom, { some_key: "some_val" }); * store.getEntriesByLinkData({ * some_key: "some_val" * }); */ public getEntriesByLinkData(data: Record<string, unknown>) { Object.keys(data).forEach(function(k) { const query = data[k]; delete data[k]; data["data." 
+ k] = query; }); return this.select(data, this.convertTo((doc: RoomStoreEntryDoc) => new RoomBridgeStoreEntry(doc) )); } /** * Remove entries based on remote room data. * @param data The data to match. * @example * remoteRoom.set("a_key", "a_val"); * // store remoteRoom and then: * store.removeEntriesByRemoteRoomData({ * a_key: "a_val" * }); */ public removeEntriesByRemoteRoomData(data: Record<string, unknown>) { Object.keys(data).forEach(function(k) { const query = data[k]; delete data[k]; data["remote." + k] = query; }); return this.delete(data); } /** * Remove entries with this remote room id. * @param remoteId The remote id. * @example * new RemoteRoom("foobar"); * // store the RemoteRoom and then: * store.removeEntriesByRemoteRoomId("foobar"); */ public removeEntriesByRemoteRoomId(remoteId: string) { return this.delete({ remote_id: remoteId }); } /** * Remove entries based on matrix room data. * @param data The data to match. * @example * matrixRoom.set("a_key", "a_val"); * // store matrixRoom and then: * store.removeEntriesByMatrixRoomData({ * a_key: "a_val" * }); */ public removeEntriesByMatrixRoomData(data: Record<string, unknown>) { Object.keys(data).forEach(function(k) { const query = data[k]; delete data[k]; data["matrix.extras." + k] = query; }); return this.delete(data); } /** * Remove entries with this matrix room id. * @param matrixId The matrix id. * @example * new MatrixRoom("!foobar:matrix.org"); * // store the MatrixRoom and then: * store.removeEntriesByMatrixRoomId("!foobar:matrix.org"); */ public removeEntriesByMatrixRoomId(matrixId: string) { return this.delete({ matrix_id: matrixId }); } /** * Remove entries based on the link's data value. * @param data The data to match. 
* @example * store.linkRooms(matrixRoom, remoteRoom, { a_key: "a_val" }); * store.removeEntriesByLinkData({ * a_key: "a_val" * }); */ public removeEntriesByLinkData(data: Record<string, unknown>) { Object.keys(data).forEach(function(k) { const query = data[k]; delete data[k]; data["data." + k] = query; }); return this.delete(data); } /** * Remove an existing entry based on the provided entry ID. * @param id The ID of the entry to remove. * @example * store.removeEntryById("anid"); */ public removeEntryById(id: string) { return this.delete({ id }); } public static createUniqueId(matrixRoomId: string, remoteRoomId: string, delimiter: string) { return (matrixRoomId || "") + delimiter + (remoteRoomId || ""); } } interface RoomStoreEntryDoc { id?: string; // eslint-disable-next-line camelcase remote_id?: string; // eslint-disable-next-line camelcase matrix_id?: string; remote?: Record<string, unknown>; matrix?: MatrixRoomData; data?: Record<string, unknown>; } export class RoomBridgeStoreEntry { public id?: string; public matrix?: MatrixRoom; public remote?: RemoteRoom; public data: Record<string, unknown>; constructor(doc?: RoomStoreEntryDoc) { this.id = doc?.id || undefined; // eslint-disable-next-line camelcase this.matrix = doc?.matrix_id ? new MatrixRoom(doc.matrix_id, doc.matrix) : undefined; // eslint-disable-next-line camelcase this.remote = doc?.remote_id ? new RemoteRoom(doc.remote_id, doc.remote) : undefined; this.data = doc?.data || {}; } // not a member function so callers can provide a POJO public static serializeEntry(entry: RoomBridgeStoreEntry): RoomStoreEntryDoc { return { id: entry.id, remote_id: entry.remote ? entry.remote.getId() : undefined, matrix_id: entry.matrix ? entry.matrix.getId() : undefined, remote: entry.remote ? entry.remote.serialize() : undefined, matrix: entry.matrix ? entry.matrix.serialize() : undefined, data: entry.data || undefined, } } }
matrix-org/matrix-appservice-bridge
src/components/room-bridge-store.ts
TypeScript
apache-2.0
15,510
import React from 'react'; import { PropTypes } from 'prop-types'; import timezones from '../../data/timezones'; import map from 'lodash/map'; import classnames from 'classnames'; import validateInput from '../../validations/signup'; import TextFieldGroup from '../Form/TextFieldGroup'; import signUp from '../../utils/signupActions'; import flashM from '../../utils/flashMessages'; class SignupForm extends React.Component { constructor(props) { super(props); this.state = { username: '', email: '', password: '', passwordConfirmation: '', timezone: '', errors: {}, isLoading: false, invalid: false } this.onChange = this.onChange.bind(this); this.onSubmit = this.onSubmit.bind(this); this.checkUserExists = this.checkUserExists.bind(this); } onChange(e) { this.setState({ [e.target.name]: e.target.value }); } isValid() { const { errors, isValid } = validateInput(this.state); if (!isValid) { this.setState({ errors }); } return isValid; } checkUserExists(e) { const field = e.target.name; const val = e.target.value; if (val !== '') { // this.props.isUserExists(val).then(res => { signUp.isUserExists(val).then(res => { let errors = this.state.errors; let invalid; if (res.data.user) { errors[field] = 'There is user with such ' + field; invalid = true; } else { errors[field] = ''; invalid = false; } this.setState({ errors, invalid }); }); } } onSubmit(e) { e.preventDefault(); if (this.isValid()) { this.setState({ errors: {}, isLoading: true }); // console.log("Here is this.props.userSignupRequest:"); // console.log(this.props.userSignupRequest); // console.log(this.state); // this.props.userSignupRequest(this.state).then( signUp.userSignupRequest(this.state).then( () => { // this.props.addFlashMessage({ flashM.addFlashMessage({ type: 'success', text: 'You signed up successfully. Welcome!' 
}); this.context.router.history.push('/'); }, (err) => this.setState({ errors: err.response.data, isLoading: false }) ); } } render() { const { errors } = this.state; const options = map(timezones, (val, key) => <option key={val} value={val}>{key}</option> ); return ( <form onSubmit={this.onSubmit}> <h1>Sign up now and become a pantry raider!</h1> <TextFieldGroup error={errors.username} label="Username" onChange={this.onChange} checkUserExists={this.checkUserExists} value={this.state.username} field="username" /> <TextFieldGroup error={errors.email} label="Email" onChange={this.onChange} checkUserExists={this.checkUserExists} value={this.state.email} field="email" /> <TextFieldGroup error={errors.password} label="Password" onChange={this.onChange} value={this.state.password} field="password" type="password" /> <TextFieldGroup error={errors.passwordConfirmation} label="Password Confirmation" onChange={this.onChange} value={this.state.passwordConfirmation} field="passwordConfirmation" type="password" /> <div className={classnames("form-group", { 'has-error': errors.timezone })}> <label className="control-label">Timezone</label> <select className="form-control" name="timezone" onChange={this.onChange} value={this.state.timezone} > <option value="" disabled>Choose Your Timezone</option> {options} </select> {errors.timezone && <span className="help-block">{errors.timezone}</span>} </div> <div className="form-group"> <button disabled={this.state.isLoading || this.state.invalid} className="btn btn-primary btn-lg"> Sign up </button> </div> </form> ); } } SignupForm.propTypes = { userSignupRequest: PropTypes.func.isRequired, addFlashMessage: PropTypes.func.isRequired, isUserExists: PropTypes.func.isRequired } SignupForm.contextTypes = { router: PropTypes.object.isRequired } export default SignupForm;
atflowers/pantryraid
client/src/components/SignupForm/SignupForm.1.js
JavaScript
apache-2.0
4,575
# Hedysarum canadense L. SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in Sp. pl. 2:1054. 1753 #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Desmodium/Desmodium canadense/ Syn. Hedysarum canadense/README.md
Markdown
apache-2.0
195