code
stringlengths 3
1.01M
| repo_name
stringlengths 5
116
| path
stringlengths 3
311
| language
stringclasses 30
values | license
stringclasses 15
values | size
int64 3
1.01M
|
|---|---|---|---|---|---|
/*
* Copyright WSO2 Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.appmgt.core.throttle;
/**
 * Placeholder for App Manager request throttling (concurrency limiting,
 * access-rate control, and domain/IP/role-based throttling).
 *
 * <p>NOTE(review): the entire previous implementation of this class was
 * commented out, and it referenced types that are no longer imported by this
 * file ({@code Log}, {@code AccessRateController}, {@code ThrottleContext},
 * {@code ConcurrentAccessController}, ...), so the commented code could not
 * compile even if re-enabled by uncommenting. The dead code has therefore
 * been removed; recover it from version control history if the throttling
 * functionality needs to be restored.
 */
public class ThrottleManager {
}
|
lakshani/carbon-mobile-appmgt
|
components/org.wso2.carbon.appmgt.core/src/main/java/org/wso2/carbon/appmgt/core/throttle/ThrottleManager.java
|
Java
|
apache-2.0
| 18,912
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Roslyn.Test.Utilities;
using Xunit;
using LSP = Microsoft.VisualStudio.LanguageServer.Protocol;
namespace Microsoft.CodeAnalysis.LanguageServer.UnitTests.Formatting
{
/// <summary>
/// Integration tests for the LSP <c>textDocument/formatting</c> request
/// handled by the Roslyn language server.
/// </summary>
public class FormatDocumentTests : AbstractLanguageServerProtocolTests
{
/// <summary>
/// Sends a whole-document formatting request and verifies that applying the
/// returned text edits to the original document yields the expected text.
/// </summary>
[Fact]
public async Task TestFormatDocumentAsync()
{
// {|caret:|} is test-markup naming the location used to resolve the
// document URI; it is not part of the document text seen by the formatter.
var markup =
@"class A
{
void M()
{
int i = 1;{|caret:|}
}
}";
// Expected document text after formatting.
// NOTE(review): leading whitespace appears stripped in this copy of the
// file — confirm the expected string against the upstream source.
var expected =
@"class A
{
void M()
{
int i = 1;
}
}";
using var workspace = CreateTestWorkspace(markup, out var locations);
var documentURI = locations["caret"].Single().Uri;
var documentText = await workspace.CurrentSolution.GetDocumentFromURI(documentURI).GetTextAsync();
// Run the formatting request, then apply the resulting edits to the
// original text and compare against the expectation.
var results = await RunFormatDocumentAsync(workspace.CurrentSolution, documentURI);
var actualText = ApplyTextEdits(results, documentText);
Assert.Equal(expected, actualText);
}
// Issues a textDocument/formatting request for the given document URI.
private static async Task<LSP.TextEdit[]> RunFormatDocumentAsync(Solution solution, Uri uri)
=> await GetLanguageServer(solution).FormatDocumentAsync(solution, CreateDocumentFormattingParams(uri), new LSP.ClientCapabilities(), CancellationToken.None);
// Builds minimal DocumentFormattingParams; formatting options are currently
// left at their defaults.
private static LSP.DocumentFormattingParams CreateDocumentFormattingParams(Uri uri)
=> new LSP.DocumentFormattingParams()
{
TextDocument = CreateTextDocumentIdentifier(uri),
Options = new LSP.FormattingOptions()
{
// TODO - Format should respect formatting options.
}
};
}
}
|
reaction1989/roslyn
|
src/Features/LanguageServer/ProtocolUnitTests/Formatting/FormatDocumentTests.cs
|
C#
|
apache-2.0
| 1,964
|
package com.vladmihalcea.book.hpjp.hibernate.association;
import com.vladmihalcea.book.hpjp.util.AbstractTest;
import org.junit.Test;
import javax.persistence.*;
import java.util.ArrayList;
import java.util.List;
/**
* @author Vlad Mihalcea
*/
/**
 * Demonstrates a bidirectional one-to-many association mapped with
 * {@code @JoinColumn} on the {@code @OneToMany} side (no {@code mappedBy}),
 * while the child-side {@code @ManyToOne} maps the same {@code post_id}
 * column read-only ({@code insertable = false, updatable = false}) so only
 * the parent side writes the foreign key.
 *
 * @author Vlad Mihalcea
 */
public class BidirectionalOneToManyJoinColumnTest extends AbstractTest {
@Override
protected Class<?>[] entities() {
// Entities registered with the test persistence unit.
return new Class<?>[] {
Post.class,
PostComment.class,
};
}
@Test
public void testLifecycle() {
doInJPA(entityManager -> {
Post post = new Post("First post");
entityManager.persist(post);
// CascadeType.ALL propagates persistence from Post to its comments.
PostComment comment1 = new PostComment("My first review");
post.addComment(comment1);
PostComment comment2 = new PostComment("My second review");
post.addComment(comment2);
// NOTE(review): post is already managed at this point, so this second
// persist call is a no-op.
entityManager.persist(post);
entityManager.flush();
// With orphanRemoval = true, removing the comment from the parent
// collection deletes the corresponding row on flush.
post.removeComment(comment1);
entityManager.flush();
});
}
// Parent entity: owns the post_id foreign key via @JoinColumn.
@Entity(name = "Post")
@Table(name = "post")
public static class Post {
@Id
@GeneratedValue
private Long id;
private String title;
// Owning side of the association: the join column is managed from the
// collection, not via mappedBy on the child.
@OneToMany(cascade = CascadeType.ALL, orphanRemoval = true)
@JoinColumn(name = "post_id")
private List<PostComment> comments = new ArrayList<>();
public Post() {}
public Post(String title) {
this.title = title;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public List<PostComment> getComments() {
return comments;
}
// Keeps both sides of the bidirectional association in sync on add.
public void addComment(PostComment comment) {
comments.add(comment);
comment.setPost(this);
}
// Keeps both sides of the bidirectional association in sync on remove.
public void removeComment(PostComment comment) {
comments.remove(comment);
comment.setPost(null);
}
}
// Child entity: reads post_id but never writes it (insertable/updatable
// are false), deferring foreign-key management to the parent side.
@Entity(name = "PostComment")
@Table(name = "post_comment")
public static class PostComment {
@Id
@GeneratedValue
private Long id;
private String review;
@ManyToOne
@JoinColumn(name = "post_id", insertable = false, updatable = false)
private Post post;
public PostComment() {}
public PostComment(String review) {
this.review = review;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getReview() {
return review;
}
public void setReview(String review) {
this.review = review;
}
public Post getPost() {
return post;
}
public void setPost(Post post) {
this.post = post;
}
}
}
|
vladmihalcea/high-performance-java-persistence
|
core/src/test/java/com/vladmihalcea/book/hpjp/hibernate/association/BidirectionalOneToManyJoinColumnTest.java
|
Java
|
apache-2.0
| 3,083
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_29) on Thu Jan 19 00:04:41 PST 2012 -->
<TITLE>
ThriftBytesToTuple
</TITLE>
<META NAME="date" CONTENT="2012-01-19">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="ThriftBytesToTuple";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../com/twitter/elephantbird/pig/piggybank/ProtobufBytesToTuple.html" title="class in com.twitter.elephantbird.pig.piggybank"><B>PREV CLASS</B></A>
NEXT CLASS</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?com/twitter/elephantbird/pig/piggybank/ThriftBytesToTuple.html" target="_top"><B>FRAMES</B></A>
<A HREF="ThriftBytesToTuple.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | <A HREF="#fields_inherited_from_class_org.apache.pig.EvalFunc">FIELD</A> | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
com.twitter.elephantbird.pig.piggybank</FONT>
<BR>
Class ThriftBytesToTuple<M extends org.apache.thrift.TBase<?,?>></H2>
<PRE>
java.lang.Object
<IMG SRC="../../../../../resources/inherit.gif" ALT="extended by ">org.apache.pig.EvalFunc<org.apache.pig.data.Tuple>
<IMG SRC="../../../../../resources/inherit.gif" ALT="extended by "><B>com.twitter.elephantbird.pig.piggybank.ThriftBytesToTuple<M></B>
</PRE>
<HR>
<DL>
<DT><PRE>public class <B>ThriftBytesToTuple<M extends org.apache.thrift.TBase<?,?>></B><DT>extends org.apache.pig.EvalFunc<org.apache.pig.data.Tuple></DL>
</PRE>
<P>
The base class for a Pig UDF that takes as input a tuple containing a single element, the
bytes of a serialized Thrift object as a DataByteArray. It outputs the Thrift object in
expanded form. The specific Thrift class is supplied through an argument to the
UDF constructor as in :
<pre>
DEFINE PersonThriftBytesToTuple com.twitter.elephantbird.pig.piggybank.ThriftBytesToTuple('com.twitter.elephantbird.thrift.Person');
persons = FOREACH thriftobjects GENERATE PersonThriftBytesToTuple($0);
</pre>
<P>
<P>
<HR>
<P>
<!-- =========== FIELD SUMMARY =========== -->
<A NAME="field_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Field Summary</B></FONT></TH>
</TR>
</TABLE>
<A NAME="fields_inherited_from_class_org.apache.pig.EvalFunc"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Fields inherited from class org.apache.pig.EvalFunc</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>log, pigLogger, reporter, returnType</CODE></TD>
</TR>
</TABLE>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<A NAME="constructor_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Constructor Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../com/twitter/elephantbird/pig/piggybank/ThriftBytesToTuple.html#ThriftBytesToTuple(java.lang.String)">ThriftBytesToTuple</A></B>(java.lang.String thriftClassName)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> org.apache.pig.data.Tuple</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../com/twitter/elephantbird/pig/piggybank/ThriftBytesToTuple.html#exec(org.apache.pig.data.Tuple)">exec</A></B>(org.apache.pig.data.Tuple input)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> org.apache.pig.impl.logicalLayer.schema.Schema</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../com/twitter/elephantbird/pig/piggybank/ThriftBytesToTuple.html#outputSchema(org.apache.pig.impl.logicalLayer.schema.Schema)">outputSchema</A></B>(org.apache.pig.impl.logicalLayer.schema.Schema input)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_org.apache.pig.EvalFunc"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class org.apache.pig.EvalFunc</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>finish, getArgToFuncMapping, getCacheFiles, getLogger, getPigLogger, getReporter, getReturnType, getSchemaName, isAsynchronous, progress, setPigLogger, setReporter, warn</CODE></TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</CODE></TD>
</TR>
</TABLE>
<P>
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<A NAME="constructor_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Constructor Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="ThriftBytesToTuple(java.lang.String)"><!-- --></A><H3>
ThriftBytesToTuple</H3>
<PRE>
public <B>ThriftBytesToTuple</B>(java.lang.String thriftClassName)</PRE>
<DL>
</DL>
<!-- ============ METHOD DETAIL ========== -->
<A NAME="method_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Method Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="exec(org.apache.pig.data.Tuple)"><!-- --></A><H3>
exec</H3>
<PRE>
public org.apache.pig.data.Tuple <B>exec</B>(org.apache.pig.data.Tuple input)
throws java.io.IOException</PRE>
<DL>
<DD><DL>
<DT><B>Specified by:</B><DD><CODE>exec</CODE> in class <CODE>org.apache.pig.EvalFunc<org.apache.pig.data.Tuple></CODE></DL>
</DD>
<DD><DL>
<DT><B>Throws:</B>
<DD><CODE>java.io.IOException</CODE></DL>
</DD>
</DL>
<HR>
<A NAME="outputSchema(org.apache.pig.impl.logicalLayer.schema.Schema)"><!-- --></A><H3>
outputSchema</H3>
<PRE>
public org.apache.pig.impl.logicalLayer.schema.Schema <B>outputSchema</B>(org.apache.pig.impl.logicalLayer.schema.Schema input)</PRE>
<DL>
<DD><DL>
<DT><B>Overrides:</B><DD><CODE>outputSchema</CODE> in class <CODE>org.apache.pig.EvalFunc<org.apache.pig.data.Tuple></CODE></DL>
</DD>
<DD><DL>
</DL>
</DD>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../com/twitter/elephantbird/pig/piggybank/ProtobufBytesToTuple.html" title="class in com.twitter.elephantbird.pig.piggybank"><B>PREV CLASS</B></A>
NEXT CLASS</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?com/twitter/elephantbird/pig/piggybank/ThriftBytesToTuple.html" target="_top"><B>FRAMES</B></A>
<A HREF="ThriftBytesToTuple.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | <A HREF="#fields_inherited_from_class_org.apache.pig.EvalFunc">FIELD</A> | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
|
ketralnis/elephant-bird
|
javadoc/com/twitter/elephantbird/pig/piggybank/ThriftBytesToTuple.html
|
HTML
|
apache-2.0
| 13,176
|
/**
*/
package edu.kit.ipd.sdq.kamp4aps.model.aPS.ComponentRepository.impl;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.ComponentRepository.ComponentRepositoryPackage;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.ComponentRepository.PowerSupply;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.ComponentRepository.Valve;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.InterfaceRepository.PneumaticSupply;
import edu.kit.ipd.sdq.kamp4aps.model.aPS.InterfaceRepository.SignalInterface;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Valve</b></em>'.
 *
 * EMF-generated implementation backing the {@link Valve} element of the aPS
 * component repository. It caches three cross-references (pneumatic supply,
 * power supply, signal interface), resolves proxies lazily in the getters,
 * and exposes the features through the standard EMF reflective API
 * ({@code eGet}/{@code eSet}/{@code eUnset}/{@code eIsSet}).
 * NOTE(review): generated code — prefer editing the Ecore model and
 * regenerating over hand-editing (or mark changed methods {@code @generated NOT}).
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 * <li>{@link edu.kit.ipd.sdq.kamp4aps.model.aPS.ComponentRepository.impl.ValveImpl#getPneumaticsupply <em>Pneumaticsupply</em>}</li>
 * <li>{@link edu.kit.ipd.sdq.kamp4aps.model.aPS.ComponentRepository.impl.ValveImpl#getPowersupply <em>Powersupply</em>}</li>
 * <li>{@link edu.kit.ipd.sdq.kamp4aps.model.aPS.ComponentRepository.impl.ValveImpl#getSignalinterface <em>Signalinterface</em>}</li>
 * </ul>
 *
 * @generated
 */
public abstract class ValveImpl extends ComponentImpl implements Valve {
/**
 * The cached value of the '{@link #getPneumaticsupply() <em>Pneumaticsupply</em>}' reference.
 * <!-- begin-user-doc -->
 * May hold an unresolved EMF proxy until {@link #getPneumaticsupply()} is called.
 * <!-- end-user-doc -->
 * @see #getPneumaticsupply()
 * @generated
 * @ordered
 */
protected PneumaticSupply pneumaticsupply;
/**
 * The cached value of the '{@link #getPowersupply() <em>Powersupply</em>}' reference.
 * <!-- begin-user-doc -->
 * May hold an unresolved EMF proxy until {@link #getPowersupply()} is called.
 * <!-- end-user-doc -->
 * @see #getPowersupply()
 * @generated
 * @ordered
 */
protected PowerSupply powersupply;
/**
 * The cached value of the '{@link #getSignalinterface() <em>Signalinterface</em>}' reference.
 * <!-- begin-user-doc -->
 * May hold an unresolved EMF proxy until {@link #getSignalinterface()} is called.
 * <!-- end-user-doc -->
 * @see #getSignalinterface()
 * @generated
 * @ordered
 */
protected SignalInterface signalinterface;
/**
 * <!-- begin-user-doc -->
 * Protected: instances are created via the package factory, and the class
 * is abstract in any case.
 * <!-- end-user-doc -->
 * @generated
 */
protected ValveImpl() {
super();
}
/**
 * <!-- begin-user-doc -->
 * Identifies this object's {@code EClass} for the EMF reflective API.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
return ComponentRepositoryPackage.Literals.VALVE;
}
/**
 * <!-- begin-user-doc -->
 * Returns the pneumatic supply, resolving the cached value first if it is
 * still a proxy; a successful resolution fires a RESOLVE notification.
 * <!-- end-user-doc -->
 * @generated
 */
public PneumaticSupply getPneumaticsupply() {
if (pneumaticsupply != null && pneumaticsupply.eIsProxy()) {
InternalEObject oldPneumaticsupply = (InternalEObject)pneumaticsupply;
pneumaticsupply = (PneumaticSupply)eResolveProxy(oldPneumaticsupply);
if (pneumaticsupply != oldPneumaticsupply) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, ComponentRepositoryPackage.VALVE__PNEUMATICSUPPLY, oldPneumaticsupply, pneumaticsupply));
}
}
return pneumaticsupply;
}
/**
 * <!-- begin-user-doc -->
 * Raw accessor: returns the cached value without proxy resolution
 * (used by {@link #eGet(int, boolean, boolean)} when {@code resolve} is false).
 * <!-- end-user-doc -->
 * @generated
 */
public PneumaticSupply basicGetPneumaticsupply() {
return pneumaticsupply;
}
/**
 * <!-- begin-user-doc -->
 * Replaces the reference and fires a SET notification to attached adapters.
 * <!-- end-user-doc -->
 * @generated
 */
public void setPneumaticsupply(PneumaticSupply newPneumaticsupply) {
PneumaticSupply oldPneumaticsupply = pneumaticsupply;
pneumaticsupply = newPneumaticsupply;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ComponentRepositoryPackage.VALVE__PNEUMATICSUPPLY, oldPneumaticsupply, pneumaticsupply));
}
/**
 * <!-- begin-user-doc -->
 * Returns the power supply, resolving the cached value first if it is
 * still a proxy; a successful resolution fires a RESOLVE notification.
 * <!-- end-user-doc -->
 * @generated
 */
public PowerSupply getPowersupply() {
if (powersupply != null && powersupply.eIsProxy()) {
InternalEObject oldPowersupply = (InternalEObject)powersupply;
powersupply = (PowerSupply)eResolveProxy(oldPowersupply);
if (powersupply != oldPowersupply) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, ComponentRepositoryPackage.VALVE__POWERSUPPLY, oldPowersupply, powersupply));
}
}
return powersupply;
}
/**
 * <!-- begin-user-doc -->
 * Raw accessor: returns the cached value without proxy resolution.
 * <!-- end-user-doc -->
 * @generated
 */
public PowerSupply basicGetPowersupply() {
return powersupply;
}
/**
 * <!-- begin-user-doc -->
 * Replaces the reference and fires a SET notification to attached adapters.
 * <!-- end-user-doc -->
 * @generated
 */
public void setPowersupply(PowerSupply newPowersupply) {
PowerSupply oldPowersupply = powersupply;
powersupply = newPowersupply;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ComponentRepositoryPackage.VALVE__POWERSUPPLY, oldPowersupply, powersupply));
}
/**
 * <!-- begin-user-doc -->
 * Returns the signal interface, resolving the cached value first if it is
 * still a proxy; a successful resolution fires a RESOLVE notification.
 * <!-- end-user-doc -->
 * @generated
 */
public SignalInterface getSignalinterface() {
if (signalinterface != null && signalinterface.eIsProxy()) {
InternalEObject oldSignalinterface = (InternalEObject)signalinterface;
signalinterface = (SignalInterface)eResolveProxy(oldSignalinterface);
if (signalinterface != oldSignalinterface) {
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.RESOLVE, ComponentRepositoryPackage.VALVE__SIGNALINTERFACE, oldSignalinterface, signalinterface));
}
}
return signalinterface;
}
/**
 * <!-- begin-user-doc -->
 * Raw accessor: returns the cached value without proxy resolution.
 * <!-- end-user-doc -->
 * @generated
 */
public SignalInterface basicGetSignalinterface() {
return signalinterface;
}
/**
 * <!-- begin-user-doc -->
 * Replaces the reference and fires a SET notification to attached adapters.
 * <!-- end-user-doc -->
 * @generated
 */
public void setSignalinterface(SignalInterface newSignalinterface) {
SignalInterface oldSignalinterface = signalinterface;
signalinterface = newSignalinterface;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, ComponentRepositoryPackage.VALVE__SIGNALINTERFACE, oldSignalinterface, signalinterface));
}
/**
 * <!-- begin-user-doc -->
 * Reflective feature read; delegates to the resolving or raw getter
 * depending on {@code resolve}.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case ComponentRepositoryPackage.VALVE__PNEUMATICSUPPLY:
if (resolve) return getPneumaticsupply();
return basicGetPneumaticsupply();
case ComponentRepositoryPackage.VALVE__POWERSUPPLY:
if (resolve) return getPowersupply();
return basicGetPowersupply();
case ComponentRepositoryPackage.VALVE__SIGNALINTERFACE:
if (resolve) return getSignalinterface();
return basicGetSignalinterface();
}
return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * Reflective feature write; unknown feature IDs fall through to the superclass.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case ComponentRepositoryPackage.VALVE__PNEUMATICSUPPLY:
setPneumaticsupply((PneumaticSupply)newValue);
return;
case ComponentRepositoryPackage.VALVE__POWERSUPPLY:
setPowersupply((PowerSupply)newValue);
return;
case ComponentRepositoryPackage.VALVE__SIGNALINTERFACE:
setSignalinterface((SignalInterface)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
 * <!-- begin-user-doc -->
 * Reflective unset: restores each reference to its default ({@code null}).
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
switch (featureID) {
case ComponentRepositoryPackage.VALVE__PNEUMATICSUPPLY:
setPneumaticsupply((PneumaticSupply)null);
return;
case ComponentRepositoryPackage.VALVE__POWERSUPPLY:
setPowersupply((PowerSupply)null);
return;
case ComponentRepositoryPackage.VALVE__SIGNALINTERFACE:
setSignalinterface((SignalInterface)null);
return;
}
super.eUnset(featureID);
}
/**
 * <!-- begin-user-doc -->
 * Reflective "is set" check: a reference counts as set when it is non-null
 * (proxies included — no resolution is performed here).
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case ComponentRepositoryPackage.VALVE__PNEUMATICSUPPLY:
return pneumaticsupply != null;
case ComponentRepositoryPackage.VALVE__POWERSUPPLY:
return powersupply != null;
case ComponentRepositoryPackage.VALVE__SIGNALINTERFACE:
return signalinterface != null;
}
return super.eIsSet(featureID);
}
} //ValveImpl
|
KAMP-Research/KAMP4APS
|
edu.kit.ipd.sdq.kamp4aps.aps/src/edu/kit/ipd/sdq/kamp4aps/model/aPS/ComponentRepository/impl/ValveImpl.java
|
Java
|
apache-2.0
| 8,415
|
// Root AngularJS module for the app: wires up sub-modules, routes unmatched
// URLs to /home, and keeps the document's page title in sync with the
// active ui-router state.
angular.module( 'qkstrt', [
  'templates-app',
  'templates-common',
  'qkstrt.home',
  'ui.router'
])
.config( function myAppConfig ( $stateProvider, $urlRouterProvider ) {
  // Any URL that matches no state falls back to the home state.
  $urlRouterProvider.otherwise( '/home' );
})
.run( function run () {
})
.controller( 'AppCtrl', function AppCtrl ( $scope, $location ) {
  $scope.$on('$stateChangeSuccess', function(event, toState, toParams, fromState, fromParams){
    // BUG FIX: `data` is optional on ui-router state definitions, so reading
    // `toState.data.pageTitle` directly threw a TypeError for any state that
    // declared no `data` object. Guard `toState.data` before dereferencing.
    if ( toState.data && angular.isDefined( toState.data.pageTitle ) ) {
      $scope.pageTitle = toState.data.pageTitle + ' | qkstrt' ;
    }
  });
})
;
|
troylelandshields/qkstrt
|
front/src/app/app.js
|
JavaScript
|
apache-2.0
| 550
|
/*
* Copyright 2019 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.ember.core.h2
import cats.data._
import scodec.bits._
/**
 * Header block encoding/decoding for the ember HTTP/2 ("h2") transport.
 *
 * NOTE(review): the name and package suggest HPACK (RFC 7541); the Boolean
 * in the encode tuple is presumably the "sensitive / never-indexed" flag —
 * confirm against the platform-specific implementations.
 */
private[h2] trait Hpack[F[_]] {
// Encode (name, value, sensitive?) triples into a single header block.
def encodeHeaders(headers: NonEmptyList[(String, String, Boolean)]): F[ByteVector]
// Decode a received header block back into (name, value) pairs.
def decodeHeaders(bv: ByteVector): F[NonEmptyList[(String, String)]]
}
// Companion; concrete construction lives in the platform-specific HpackPlatform.
private[h2] object Hpack extends HpackPlatform {}
|
rossabaker/http4s
|
ember-core/shared/src/main/scala/org/http4s/ember/core/h2/Hpack.scala
|
Scala
|
apache-2.0
| 911
|
//*********************************************************//
// Copyright (c) Microsoft. All rights reserved.
//
// Apache 2.0 License
//
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
//*********************************************************//
using System;
using System.Windows.Forms;
namespace Microsoft.NodejsTools.Options {
    /// <summary>
    /// WinForms control backing the Node.js Tools "General" options page.
    /// Translates between the survey/news combo-box row and the
    /// <c>SurveyNewsPolicy</c> enum, and copies the remaining checkbox
    /// settings to and from the owning <c>NodejsGeneralOptionsPage</c>.
    /// </summary>
    public partial class NodejsGeneralOptionsControl : UserControl {
        // Row positions in the survey/news frequency combo box.
        private const int SurveyNewsNeverIndex = 0;
        private const int SurveyNewsOnceDayIndex = 1;
        private const int SurveyNewsOnceWeekIndex = 2;
        private const int SurveyNewsOnceMonthIndex = 3;

        public NodejsGeneralOptionsControl() {
            InitializeComponent();
        }

        /// <summary>
        /// The survey/news check policy currently shown in the combo box.
        /// Unknown selections read back as <c>Disabled</c>; assigning an
        /// unknown policy value leaves the selection untouched.
        /// </summary>
        internal SurveyNewsPolicy SurveyNewsCheckCombo {
            get {
                int selected = _surveyNewsCheckCombo.SelectedIndex;
                if (selected == SurveyNewsOnceDayIndex) {
                    return SurveyNewsPolicy.CheckOnceDay;
                }
                if (selected == SurveyNewsOnceWeekIndex) {
                    return SurveyNewsPolicy.CheckOnceWeek;
                }
                if (selected == SurveyNewsOnceMonthIndex) {
                    return SurveyNewsPolicy.CheckOnceMonth;
                }
                // SurveyNewsNeverIndex and anything unexpected.
                return SurveyNewsPolicy.Disabled;
            }
            set {
                int target;
                if (value == SurveyNewsPolicy.Disabled) {
                    target = SurveyNewsNeverIndex;
                } else if (value == SurveyNewsPolicy.CheckOnceDay) {
                    target = SurveyNewsOnceDayIndex;
                } else if (value == SurveyNewsPolicy.CheckOnceWeek) {
                    target = SurveyNewsOnceWeekIndex;
                } else if (value == SurveyNewsPolicy.CheckOnceMonth) {
                    target = SurveyNewsOnceMonthIndex;
                } else {
                    return; // unknown policy: keep the current selection
                }
                _surveyNewsCheckCombo.SelectedIndex = target;
            }
        }

        /// <summary>Copies the page's persisted settings into the controls.</summary>
        internal void SyncControlWithPageSettings(NodejsGeneralOptionsPage page) {
            SurveyNewsCheckCombo = page.SurveyNewsCheck;
            _waitOnAbnormalExit.Checked = page.WaitOnAbnormalExit;
            _waitOnNormalExit.Checked = page.WaitOnNormalExit;
            _editAndContinue.Checked = page.EditAndContinue;
            _checkForLongPaths.Checked = page.CheckForLongPaths;
        }

        /// <summary>Copies the current control values back onto the page.</summary>
        internal void SyncPageWithControlSettings(NodejsGeneralOptionsPage page) {
            page.SurveyNewsCheck = SurveyNewsCheckCombo;
            page.WaitOnAbnormalExit = _waitOnAbnormalExit.Checked;
            page.WaitOnNormalExit = _waitOnNormalExit.Checked;
            page.EditAndContinue = _editAndContinue.Checked;
            page.CheckForLongPaths = _checkForLongPaths.Checked;
        }
    }
}
|
mauricionr/nodejstools
|
Nodejs/Product/Nodejs/Options/NodejsGeneralOptionsControl.cs
|
C#
|
apache-2.0
| 3,515
|
package bob.sun.bender.fragments;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.ListView;
import com.huami.mibandscan.MiBandScanStatus;
import bob.sun.bender.R;
import bob.sun.bender.adapters.SearchBandAdapter;
import bob.sun.bender.controller.OnBandFoundListener;
import bob.sun.bender.controller.OnTickListener;
import bob.sun.bender.model.MIBandSearchInstance;
import bob.sun.bender.model.MiBandDevice;
import bob.sun.bender.model.SelectionDetail;
/**
 * UI logic for the "connect a Mi Band" screen: scans for nearby bands,
 * lists the results, and persists the band the user taps as the bound device.
 * Created by Johnny on 17/04/01.
 */
public class BandConnectFragment extends Fragment implements OnTickListener, OnBandFoundListener {
private static final String TAG = "BandConnectFragment";
@SuppressWarnings("FieldCanBeLocal")
private ListView listView;
private Button searchBtn;
// Shared scanner wrapper around the Mi Band SDK (singleton).
private MIBandSearchInstance miBandSearchInstance;
// Name of the SharedPreferences file holding the bound band's MAC address.
public static final String preference_file_key = "BandPref";
SearchBandAdapter searchBandAdapter;
/**
 * Inflates the layout, hooks the result list to its adapter, and wires the
 * search button to start/stop scanning (toggling its label accordingly).
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup parent, Bundle savedInstanceState){
View ret = inflater.inflate(R.layout.layout_connect_band,parent,false);
listView = (ListView) ret.findViewById(R.id.id_list_band);
searchBtn = (Button) ret.findViewById(R.id.id_search_button);
searchBandAdapter = new SearchBandAdapter();
listView.setAdapter(searchBandAdapter);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
// Persist the tapped band as the bound device.
MiBandDevice device = searchBandAdapter.getItem(position);
saveBandMac(device);
}
});
miBandSearchInstance = MIBandSearchInstance.getInstance();
searchBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Toggle scanning; only flip the label to "stop" if the scan actually started.
if (!miBandSearchInstance.isStartScan()) {
if (miBandSearchInstance.startScan(BandConnectFragment.this)) {
searchBtn.setText(R.string.stop_scan);
}
} else {
miBandSearchInstance.stopScan();
searchBtn.setText(R.string.start_scan);
}
}
});
return ret;
}
// Stop scanning whenever the fragment leaves the foreground to avoid a
// scan outliving this UI.
@Override
public void onStop() {
super.onStop();
miBandSearchInstance.stopScan();
}
@Override
public void onNextTick() {
}
@Override
public void onPreviousTick() {
}
// This screen exposes no selectable menu entry; report an unused selection.
@Override
public SelectionDetail getCurrentSelection() {
SelectionDetail selectionDetail = new SelectionDetail();
selectionDetail.setMenuType(SelectionDetail.MENU_TYPE_UNUSED);
return selectionDetail;
}
// Scan callback: append each discovered band to the list.
@Override
public void onData(MiBandDevice device) {
searchBandAdapter.add(device);
}
@Override
public void onStatus(MiBandScanStatus scanStatus) {
// Handle scan-status callbacks from the Mi Band SDK (currently a no-op).
}
/**
 * Persists the given band's MAC address under key "BAND_MAC" so other
 * components can reconnect to the bound band later.
 */
public void saveBandMac(MiBandDevice device) {
Log.i(TAG, "saveBandMac: " + device.getBandMac());
SharedPreferences sharedPref = getActivity()
.getSharedPreferences(preference_file_key,Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPref.edit();
editor.putString("BAND_MAC", device.getBandMac());
editor.apply();
}
}
|
JohnnySun/MusicPlayer
|
app/src/main/java/bob/sun/bender/fragments/BandConnectFragment.java
|
Java
|
apache-2.0
| 3,813
|
<!DOCTYPE HTML>
<html>
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>sap.m.Toolbar</title>
<script src="shared-config.js"></script>
<script src="../../../resources/sap-ui-core.js"
id="sap-ui-bootstrap" data-sap-ui-libs="sap.m">
</script>
<style type="text/css"></style>
<script>
// initialize
var oUriParameters = jQuery.sap.getUriParameters();
jQuery.sap.require("sap.m.MessageToast");
jQuery.sap.require("sap.m.ToolbarDesign");
jQuery.sap.require("sap.m.Toolbar");
jQuery.sap.require("sap.ui.core.IconPool");
// for behaviour testing
if (oUriParameters.get("oldFlex")) {
sap.m.ToolbarRenderer.hasNewFlexBoxSupport = false;
}
var oApp = new sap.m.App();
var sAddIconURI = sap.ui.core.IconPool.getIconURI("add");
function getToolbarContent (sText, sSelId) {
return [
new sap.m.Button({text : "Button"}),
new sap.m.ToolbarSeparator(),
new sap.m.Button({text : "Test"}),
new sap.m.Label({
text: "Choose:",
labelFor: sSelId
}),
new sap.m.Select(sSelId, {
autoAdjustWidth: true,
items : [
new sap.ui.core.Item({
key: "0",
text: "item 0"
}),
new sap.ui.core.Item({
key: "0",
text: "loooooooooooooong item"
})
]
}),
new sap.m.SearchField({
placeholder : "Search",
width : "200px"
}),
new sap.m.SegmentedButton({
buttons: [
new sap.m.Button({
type: sap.m.ButtonType.Default,
icon: sAddIconURI,
enabled: true
}),
new sap.m.Button({
type: sap.m.ButtonType.Default,
icon: sAddIconURI,
enabled: true
}),
new sap.m.Button({
type: sap.m.ButtonType.Default,
icon: sAddIconURI,
enabled: true
}),
new sap.m.Button({
type: sap.m.ButtonType.Default,
icon: sAddIconURI,
enabled: true
})
]
}),
new sap.m.Button({text : "Button"}),
new sap.m.Select({
type: sap.m.SelectType.IconOnly,
icon: sap.ui.core.IconPool.getIconURI("filter"),
autoAdjustWidth: true,
items : [
new sap.ui.core.Item({
key: "0",
text: "item 0"
}),
new sap.ui.core.Item({
key: "0",
text: "loooooooooooooong item"
})
]
}),
new sap.m.ToolbarSpacer(),
new sap.m.Label({ text : sText }),
new sap.m.ToolbarSpacer(),
new sap.m.Label({
text: "Choose:",
labelFor: sSelId + 'innerSelect'
}),
new sap.m.Select(sSelId + 'innerSelect', {
items : [
new sap.ui.core.Item({
key: "0",
text: "item 0"
}),
new sap.ui.core.Item({
key: "0",
text: "loooooooooooooong item"
})
]
})
]
}
function getInputToolbarContent () {
return [
new sap.m.Label({
text : "Input controls"
}),
new sap.m.ToolbarSpacer(),
new sap.m.ToggleButton({
text: "Press to toggle"
}),
new sap.m.Input({
placeholder: "Input",
width: "150px"
}),
new sap.m.DateTimePicker({
placeholder: "DateTimePicker",
width: "250px"
}),
new sap.m.CheckBox({
text : "Checkbox"
}),
new sap.m.RadioButton({
text : "Option a",
group : "a"
}),
new sap.m.RadioButton({
text : "Option b",
group : "b"
})
];
}
TBHeader = new sap.m.Toolbar({
content : getToolbarContent("This is a Header", "selH")
});
TBSubHeader = new sap.m.Toolbar({
content : getToolbarContent("This is a SubHeader", "selSubH")
});
TBFooter = new sap.m.Toolbar({
content : getToolbarContent("This is a Footer", "selF")
});
// test toolbars in dialog
var oDialog = new sap.m.Dialog({
title: "Toolbar Dialog",
content: [
oList = new sap.m.List({
headerToolbar : new sap.m.Toolbar({
content : [
new sap.m.Label({
text : "This is a header"
}),
new sap.m.ToolbarSpacer(),
new sap.m.Button({
text : "Remove",
type : "Reject",
press : function() {
oList.getHeaderToolbar().destroy();
}
})
]
})
}),
oList.getHeaderToolbar().clone().setDesign("Info").setHeight("auto")
],
beginButton: new sap.m.Button({
text: "Close",
press : function() {
oDialog.close()
}
})
});
var iMessageToastDuration = 500;
// add toolbars to the page
var oPage = new sap.m.Page("toolbar-page", {
customHeader : TBHeader,
subHeader : TBSubHeader,
footer : TBFooter,
title : "Toolbar",
enableScrolling : true,
headerContent : new sap.m.Button({
text : "Open Dialog",
press : function() {
oDialog.open()
}
}),
content : [
new sap.m.Toolbar("info_bar", {
active : true,
design : sap.m.ToolbarDesign.Info,
tooltip : "This is a info bar",
content : [
new sap.m.Label({
text : "Plain old infobar"
}),
new sap.m.Label({text: "Text"}),
new sap.m.Label({text: "Text"}),
new sap.m.ToolbarSpacer(),
new sap.m.Label({text: "Text"}),
new sap.m.Label({text: "Text"}),
new sap.ui.core.Icon({
src : "sap-icon://collaborate",
width : "2rem"
})
]
}).attachPress(function(oEvent) {
sap.m.MessageToast.show("InfoBar Pressed! Sorce: " + oEvent.getParameter("srcControl").getId(), {
duration: iMessageToastDuration
});
}),
new sap.m.Toolbar({
design : sap.m.ToolbarDesign.Solid,
height : "auto",
content : [
new sap.m.Label({
text : "This text should never get shrink.",
layoutData : new sap.m.ToolbarLayoutData({
shrinkable : false
})
}),
new sap.m.ToolbarSpacer(),
new sap.m.Button({
text : "This Button is shrinkable up to 100px",
icon: "sap-icon://person-placeholder",
layoutData : new sap.m.ToolbarLayoutData({
shrinkable : true,
minWidth: "100px"
}),
press : function() {
sap.m.MessageToast.show("Shrinkable button is pressed.", {
at: "center center",
duration: iMessageToastDuration
});
}
})
]
}),
new sap.m.Toolbar({
height: "auto",
content : [
new sap.m.Label({
text : "Percent Width Controls",
labelFor: "searchField",
width: "15%"
}),
new sap.m.ToolbarSpacer(),
new sap.m.SearchField("searchField", {
placeholder : "This has 100% width by default"
})
]
}),
new sap.m.Toolbar({
height: "auto",
design : sap.m.ToolbarDesign.Transparent,
content : [
new sap.m.Label({
text : "Segmented Button"
}),
new sap.m.ToolbarSpacer(),
new sap.m.SegmentedButton({
selectedButton : "sbi1",
buttons : [
new sap.m.Button("sbi1", {
text : "Seg-"
}),
new sap.m.Button({
text : "-men-"
}),
new sap.m.Button({
text : "-ted Button"
})
]
})
]
}),
new sap.m.Toolbar({
height: "auto",
design : sap.m.ToolbarDesign.Transparent,
content : [
new sap.m.Label({
text : "Lots of Buttons"
}),
new sap.m.ToolbarSpacer(),
new sap.m.Button({
text : "1st Button"
}),
new sap.m.Button({
type : "Accept",
text : "Second Button Shrinkable",
icon: "sap-icon://person-placeholder",
layoutData : new sap.m.ToolbarLayoutData({
shrinkable : true
})
}),
new sap.m.Button({
text : "3rd Button"
}),
new sap.m.Button({
type : "Reject",
text : "Fourth Button Shrinkable",
icon: "sap-icon://person-placeholder",
layoutData : new sap.m.ToolbarLayoutData({
shrinkable : true
})
}),
new sap.m.Button({
text : "5th Button"
})
]
}),
new sap.m.Toolbar({
content : getInputToolbarContent()
}).applyTagAndContextClassFor("header"),
new sap.m.Toolbar({
content : getInputToolbarContent()
}).applyTagAndContextClassFor("subheader"),
new sap.m.Toolbar({
content : getInputToolbarContent()
}).applyTagAndContextClassFor("footer"),
new sap.m.Toolbar({
design : sap.m.ToolbarDesign.Transparent,
content : getInputToolbarContent()
}),
new sap.m.Toolbar({
design : sap.m.ToolbarDesign.Solid,
content : getInputToolbarContent()
}),
new sap.m.Toolbar({
height : "auto",
design : sap.m.ToolbarDesign.Solid,
content : [
new sap.m.Label({
text : "This text should never get shrink This text should never get shrink",
layoutData : new sap.m.ToolbarLayoutData({
shrinkable : false
})
}),
new sap.m.ToolbarSpacer(),
new sap.m.Button({
text : "This Button is shrinkable",
layoutData : new sap.m.ToolbarLayoutData({
shrinkable : true
})
}),
new sap.m.ToolbarSpacer(),
new sap.m.Button({
text : "This Button is shrinkable up to 100px",
layoutData : new sap.m.ToolbarLayoutData({
shrinkable : true,
minWidth: "100px",
maxWidth: "250px"
})
})
]
}),
new sap.m.Toolbar({
design : sap.m.ToolbarDesign.Solid,
content : [
new sap.m.Button({
text : "Button1",
type : "Accept",
width : "50%"
}),
new sap.m.Button({
text : "Button2",
type : "Reject",
width : "50%"
})
]
})
]
});
var oApp = new sap.m.App();
oApp.addPage(oPage).placeAt("body");
var oB = new sap.m.Button("size_btn", {
text : "Change page size",
press : function() { jQuery("#toolbar-page").width("300px"); }
});
oB.placeAt("body");
</script>
</head>
<body id="body" class="sapUiBody">
</body>
</html>
|
cschuff/openui5
|
src/sap.m/test/sap/m/Toolbar.html
|
HTML
|
apache-2.0
| 9,531
|
/**
* Visual Blocks Language
*
* Copyright 2012 Fred Lin.
* https://github.com/gasolin/BlocklyDuino
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Helper functions for generating Arduino blocks.
* @author gasolin@gmail.com (Fred Lin)
*/
'use strict';
//To support syntax defined in http://arduino.cc/en/Reference/HomePage
//define blocks
if (!Blockly.Language) Blockly.Language = {};
Blockly.Language.base_delay = {
category: 'Control',
helpUrl: 'http://arduino.cc/en/Reference/delay',
init: function() {
this.setColour(120);
this.appendValueInput("DELAY_TIME", Number)
.appendTitle("Delay")
.setCheck(Number);
this.setInputsInline(true);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setTooltip('Delay specific time');
}
};
Blockly.Language.base_map = {
category: 'Math',
helpUrl: 'http://arduino.cc/en/Reference/map',
init: function() {
this.setColour(230);
this.appendValueInput("NUM", Number)
.appendTitle("Map ")
.setCheck(Number);
this.appendValueInput("DMAX", Number)
.appendTitle("value to [0-")
.setCheck(Number);
this.appendDummyInput("")
.appendTitle("]");
this.setInputsInline(true);
this.setOutput(true);
this.setTooltip('Re-maps a number from [0-1024] to another.');
}
};
Blockly.Language.inout_buildin_led = {
category: 'In/Out',
helpUrl: 'http://arduino.cc/en/Reference/DigitalWrite',
init: function() {
this.setColour(190);
this.appendDummyInput("")
.appendTitle("Build-in LED Stat")
.appendTitle(new Blockly.FieldDropdown([["HIGH", "HIGH"], ["LOW", "LOW"]]), "STAT");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setTooltip('light or off the build-in LED');
}
};
Blockly.Language.inout_digital_write = {
category: 'In/Out',
helpUrl: 'http://arduino.cc/en/Reference/DigitalWrite',
init: function() {
this.setColour(230);
this.appendDummyInput("")
.appendTitle("DigitalWrite PIN#")
.appendTitle(new Blockly.FieldDropdown(profile.default.digital), "PIN")
.appendTitle("Stat")
.appendTitle(new Blockly.FieldDropdown([["HIGH", "HIGH"], ["LOW", "LOW"]]), "STAT");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setTooltip('Write digital value to a specific Port');
}
};
Blockly.Language.inout_digital_read = {
category: 'In/Out',
helpUrl: 'http://arduino.cc/en/Reference/DigitalRead',
init: function() {
this.setColour(230);
this.appendDummyInput("")
.appendTitle("DigitalRead PIN#")
.appendTitle(new Blockly.FieldDropdown(profile.default.digital), "PIN");
this.setOutput(true, Boolean);
this.setTooltip('');
}
};
Blockly.Language.inout_analog_write = {
category: 'In/Out',
helpUrl: 'http://arduino.cc/en/Reference/AnalogWrite',
init: function() {
this.setColour(230);
this.appendDummyInput("")
.appendTitle("AnalogWrite PIN#")
.appendTitle(new Blockly.FieldDropdown(profile.default.analog), "PIN");
this.appendValueInput("NUM", Number)
.appendTitle("value")
.setCheck(Number);
this.setInputsInline(true);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setTooltip('Write analog value between 0 and 255 to a specific Port');
}
};
Blockly.Language.inout_analog_read = {
category: 'In/Out',
helpUrl: 'http://arduino.cc/en/Reference/AnalogRead',
init: function() {
this.setColour(230);
this.appendDummyInput("")
.appendTitle("AnalogRead PIN#")
.appendTitle(new Blockly.FieldDropdown(profile.default.analog), "PIN");
this.setOutput(true, Number);
this.setTooltip('Return value between 0 and 1024');
}
};
Blockly.Language.inout_highlow = {
category: 'In/Out',
helpUrl: 'http://arduino.cc/en/Reference/Constants',
init: function() {
this.setColour(230);
this.appendDummyInput("")
.appendTitle(new Blockly.FieldDropdown([["HIGH", "HIGH"], ["LOW", "LOW"]]), 'BOOL')
this.setOutput(true, Boolean);
this.setTooltip(Blockly.LANG_LOGIC_BOOLEAN_TOOLTIP_1);
}
};
//servo block
//http://www.seeedstudio.com/depot/emax-9g-es08a-high-sensitive-mini-servo-p-760.html?cPath=170_171
Blockly.Language.servo_move = {
category: 'Servo',
helpUrl: 'http://www.arduino.cc/playground/ComponentLib/servo',
init: function() {
this.setColour(190);
this.appendDummyInput("")
.appendTitle("Servo")
.appendTitle(new Blockly.FieldImage("http://www.seeedstudio.com/depot/images/product/a991.jpg", 64, 64))
.appendTitle("PIN#")
.appendTitle(new Blockly.FieldDropdown(profile.default.digital), "PIN")
this.appendValueInput("DEGREE", Number)
.setCheck(Number)
.setAlign(Blockly.ALIGN_RIGHT)
.appendTitle("Degree (0~180)");
this.appendValueInput("DELAY_TIME", Number)
.setCheck(Number)
.setAlign(Blockly.ALIGN_RIGHT)
.appendTitle("Delay");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setTooltip('move between 0~180 degree');
}
};
Blockly.Language.servo_read_degrees = {
category: 'Servo',
helpUrl: 'http://www.arduino.cc/playground/ComponentLib/servo',
init: function() {
this.setColour(190);
this.appendDummyInput("")
.appendTitle("Servo")
.appendTitle(new Blockly.FieldImage("http://www.seeedstudio.com/depot/images/product/a991.jpg", 64, 64))
.appendTitle("PIN#")
.appendTitle(new Blockly.FieldDropdown(profile.default.digital), "PIN");
this.appendDummyInput("")
.setAlign(Blockly.ALIGN_RIGHT)
.appendTitle("Read Degrees")
this.setOutput(true, Number);
this.setTooltip('return that degree with the last servo move.');
}
};
Blockly.Language.serial_print = {
category: 'In/Out',
helpUrl: 'http://www.arduino.cc/en/Serial/Print',
init: function() {
this.setColour(230);
this.appendValueInput("CONTENT", String)
.appendTitle("Serial Print");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setTooltip('Prints data to the console/serial port as human-readable ASCII text.');
}
};
// define generators
Blockly.Arduino = Blockly.Generator.get('Arduino');
Blockly.Arduino.base_delay = function() {
var delay_time = Blockly.Arduino.valueToCode(this, 'DELAY_TIME', Blockly.Arduino.ORDER_ATOMIC) || '1000'
var code = 'delay(' + delay_time + ');\n';
return code;
};
Blockly.Arduino.base_map = function() {
var value_num = Blockly.Arduino.valueToCode(this, 'NUM', Blockly.Arduino.ORDER_NONE);
var value_dmax = Blockly.Arduino.valueToCode(this, 'DMAX', Blockly.Arduino.ORDER_ATOMIC);
var code = 'map('+value_num+', 0, 1024, 0, '+value_dmax+')';
return [code, Blockly.Arduino.ORDER_NONE];
};
Blockly.Arduino.inout_buildin_led = function() {
var dropdown_stat = this.getTitleValue('STAT');
Blockly.Arduino.setups_['setup_output_4'] = 'pinMode(4, OUTPUT);';
var code = 'digitalWrite(4,'+dropdown_stat+');\n'
return code;
};
Blockly.Arduino.inout_digital_write = function() {
var dropdown_pin = this.getTitleValue('PIN');
var dropdown_stat = this.getTitleValue('STAT');
Blockly.Arduino.setups_['setup_output_'+dropdown_pin] = 'pinMode('+dropdown_pin+', OUTPUT);';
var code = 'digitalWrite('+dropdown_pin+','+dropdown_stat+');\n'
return code;
};
Blockly.Arduino.inout_digital_read = function() {
var dropdown_pin = this.getTitleValue('PIN');
Blockly.Arduino.setups_['setup_input_'+dropdown_pin] = 'pinMode('+dropdown_pin+', INPUT);';
var code = 'digitalRead('+dropdown_pin+')';
return [code, Blockly.Arduino.ORDER_ATOMIC];
};
Blockly.Arduino.inout_analog_write = function() {
var dropdown_pin = this.getTitleValue('PIN');
//var dropdown_stat = this.getTitleValue('STAT');
var value_num = Blockly.Arduino.valueToCode(this, 'NUM', Blockly.Arduino.ORDER_ATOMIC);
//Blockly.Arduino.setups_['setup_output'+dropdown_pin] = 'pinMode('+dropdown_pin+', OUTPUT);';
var code = 'analogWrite('+dropdown_pin+','+value_num+');\n';
return code;
};
Blockly.Arduino.inout_analog_read = function() {
var dropdown_pin = this.getTitleValue('PIN');
//Blockly.Arduino.setups_['setup_input_'+dropdown_pin] = 'pinMode('+dropdown_pin+', INPUT);';
var code = 'analogRead('+dropdown_pin+')';
return [code, Blockly.Arduino.ORDER_ATOMIC];
};
Blockly.Arduino.inout_highlow = function() {
// Boolean values HIGH and LOW.
var code = (this.getTitleValue('BOOL') == 'HIGH') ? 'HIGH' : 'LOW';
return [code, Blockly.Arduino.ORDER_ATOMIC];
};
/*
//servo
#include <Servo.h>
Servo servo_11;
void setup() {
servo_11.attach(11);
}
void loop() {
servo_11.write(0);
delay(2000);
servo_11.write(150); //0~180
delay(2000);
}
*/
Blockly.Arduino.servo_move = function() {
var dropdown_pin = this.getTitleValue('PIN');
var value_degree = Blockly.Arduino.valueToCode(this, 'DEGREE', Blockly.Arduino.ORDER_ATOMIC);
//value_degree = value_degree.replace('(','').replace(')','')
var delay_time = Blockly.Arduino.valueToCode(this, 'DELAY_TIME', Blockly.Arduino.ORDER_ATOMIC) || '1000'
//delay_time = delay_time.replace('(','').replace(')','');
Blockly.Arduino.definitions_['define_servo'] = '#include <Servo.h>\n';
Blockly.Arduino.definitions_['var_servo'+dropdown_pin] = 'Servo servo_'+dropdown_pin+';\n';
Blockly.Arduino.setups_['setup_servo_'+dropdown_pin] = 'servo_'+dropdown_pin+'.attach('+dropdown_pin+');\n';
var code = 'servo_'+dropdown_pin+'.write('+value_degree+');\n'+'delay(' + delay_time + ');\n';
return code;
};
// Code generator for the servo_read_degrees value block: emits
// `servo_<pin>.read()` plus the Servo.h include / global / attach() setup.
Blockly.Arduino.servo_read_degrees = function() {
  var dropdown_pin = this.getTitleValue('PIN');
  Blockly.Arduino.definitions_['define_servo'] = '#include <Servo.h>\n';
  Blockly.Arduino.definitions_['var_servo'+dropdown_pin] = 'Servo servo_'+dropdown_pin+';\n';
  Blockly.Arduino.setups_['setup_servo_'+dropdown_pin] = 'servo_'+dropdown_pin+'.attach('+dropdown_pin+');\n';
  var code = 'servo_'+dropdown_pin+'.read()';
  // BUG FIX: value-block generators must return a [code, order] tuple (as
  // inout_digital_read / inout_analog_read in this file do); returning the
  // bare string broke operator-precedence handling in valueToCode.
  return [code, Blockly.Arduino.ORDER_ATOMIC];
};
// Statement block: prints a value followed by a tab over the serial port.
// Registers the Serial.begin() setup call at the profile's default baud rate.
Blockly.Arduino.serial_print = function() {
  var content = Blockly.Arduino.valueToCode(this, 'CONTENT', Blockly.Arduino.ORDER_ATOMIC) || '0';
  Blockly.Arduino.setups_['setup_serial_'+profile.default.serial] = 'Serial.begin('+profile.default.serial+');\n';
  return 'Serial.print('+content+');\nSerial.print(\'\\t\');\n';
};
|
osdomotics/maier
|
www/generators/arduino/base.js
|
JavaScript
|
apache-2.0
| 11,087
|
package com.coolweather.service;
import com.coolweather.util.HttpCallbackListener;
import com.coolweather.util.HttpUtils;
import com.coolweather.util.Utility;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.IBinder;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.provider.Settings.System;
/**
 * Background service that refreshes the cached weather data on each start and
 * then re-schedules itself via {@link AlarmManager} to run again in eight
 * hours.
 */
public class AutoUpdateService extends Service {

    @Override
    public IBinder onBind(Intent intent) {
        // Started service only — binding is not supported.
        return null;
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        // Run the network fetch off the main thread.
        new Thread(new Runnable() {
            @Override
            public void run() {
                updateWeather();
            }
        }).start();
        // Schedule the next update eight hours from now.
        AlarmManager manager = (AlarmManager) getSystemService(ALARM_SERVICE);
        int anHour = 8 * 60 * 60 * 1000; // eight hours in milliseconds
        long triggerAtTime = SystemClock.elapsedRealtime() + anHour;
        Intent i = new Intent(this, AutoUpdateService.class);
        // Bug fix: the Intent targets a Service, so the PendingIntent must be
        // built with getService(); getBroadcast() would fire a broadcast that
        // nothing receives and the alarm would never restart this service.
        PendingIntent pi = PendingIntent.getService(this, 0, i, 0);
        manager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, triggerAtTime, pi);
        return super.onStartCommand(intent, flags, startId);
    }

    /**
     * Fetches the weather for the stored city code and hands the response to
     * {@link Utility#handleWeatherResponse} for caching.
     */
    protected void updateWeather() {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        String weatherCode = prefs.getString("weather_code", "");
        String address = "http://weatherapi.market.xiaomi.com/wtr-v2/weather?cityId=" + weatherCode + ".html";
        HttpUtils.sendHttpRequest(address, new HttpCallbackListener() {
            @Override
            public void onFinish(String response) {
                Utility.handleWeatherResponse(AutoUpdateService.this, response);
            }

            @Override
            public void onError(Exception e) {
                // Best effort: log the failure and keep the old cached data.
                e.printStackTrace();
            }
        });
    }
}
|
LBX123/coolweather
|
src/com/coolweather/service/AutoUpdateService.java
|
Java
|
apache-2.0
| 1,990
|
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import logging
import sys
import uuid
import fixtures
from oslo_serialization import jsonutils
from oslo_utils import strutils
from oslo_utils import timeutils
from stevedore import dispatch
from stevedore import extension
import testscenarios
import yaml
import oslo_messaging
from oslo_messaging.notify import _impl_log
from oslo_messaging.notify import _impl_test
from oslo_messaging.notify import messaging
from oslo_messaging.notify import notifier as msg_notifier
from oslo_messaging import serializer as msg_serializer
from oslo_messaging.tests import utils as test_utils
from six.moves import mock
load_tests = testscenarios.load_tests_apply_scenarios
class JsonMessageMatcher(object):
    """Equality helper: compares an expected dict against a JSON string.

    Used with mock call assertions so a serialized notification argument
    can be matched structurally rather than byte-for-byte.
    """

    def __init__(self, message):
        self.message = message

    def __eq__(self, other):
        # Decode the candidate JSON text and compare structurally.
        return jsonutils.loads(other) == self.message
class _FakeTransport(object):
def __init__(self, conf):
self.conf = conf
def _send_notification(self, target, ctxt, message, version, retry=None):
pass
class _ReRaiseLoggedExceptionsFixture(fixtures.Fixture):
    """Record logged exceptions and re-raise in cleanup.

    The notifier just logs notification send errors so, for the sake of
    debugging test failures, we record any exceptions logged and re-raise them
    during cleanup.
    """

    class FakeLogger(object):
        # Stand-in for the notify module's logger; only captures exceptions.
        def __init__(self):
            self.exceptions = []

        def exception(self, msg, *args, **kwargs):
            # Record the active exception object rather than the log message.
            self.exceptions.append(sys.exc_info()[1])

    def setUp(self):
        super(_ReRaiseLoggedExceptionsFixture, self).setUp()
        self.logger = self.FakeLogger()

        def reraise_exceptions():
            # Surface any exception the code under test merely logged, so the
            # test fails loudly instead of silently passing.
            for ex in self.logger.exceptions:
                raise ex

        self.addCleanup(reraise_exceptions)
class TestMessagingNotifier(test_utils.BaseTestCase):
_v1 = [
('v1', dict(v1=True)),
('not_v1', dict(v1=False)),
]
_v2 = [
('v2', dict(v2=True)),
('not_v2', dict(v2=False)),
]
_publisher_id = [
('ctor_pub_id', dict(ctor_pub_id='test',
expected_pub_id='test')),
('prep_pub_id', dict(prep_pub_id='test.localhost',
expected_pub_id='test.localhost')),
('override', dict(ctor_pub_id='test',
prep_pub_id='test.localhost',
expected_pub_id='test.localhost')),
]
_topics = [
('no_topics', dict(topics=[])),
('single_topic', dict(topics=['notifications'])),
('multiple_topic2', dict(topics=['foo', 'bar'])),
]
_priority = [
('audit', dict(priority='audit')),
('debug', dict(priority='debug')),
('info', dict(priority='info')),
('warn', dict(priority='warn')),
('error', dict(priority='error')),
('sample', dict(priority='sample')),
('critical', dict(priority='critical')),
]
_payload = [
('payload', dict(payload={'foo': 'bar'})),
]
_context = [
('ctxt', dict(ctxt={'user': 'bob'})),
]
_retry = [
('unconfigured', dict()),
('None', dict(retry=None)),
('0', dict(retry=0)),
('5', dict(retry=5)),
]
@classmethod
def generate_scenarios(cls):
cls.scenarios = testscenarios.multiply_scenarios(cls._v1,
cls._v2,
cls._publisher_id,
cls._topics,
cls._priority,
cls._payload,
cls._context,
cls._retry)
def setUp(self):
super(TestMessagingNotifier, self).setUp()
self.logger = self.useFixture(_ReRaiseLoggedExceptionsFixture()).logger
self.stubs.Set(messaging, 'LOG', self.logger)
self.stubs.Set(msg_notifier, '_LOG', self.logger)
@mock.patch('oslo_utils.timeutils.utcnow')
def test_notifier(self, mock_utcnow):
drivers = []
if self.v1:
drivers.append('messaging')
if self.v2:
drivers.append('messagingv2')
self.config(driver=drivers,
topics=self.topics,
group='oslo_messaging_notifications')
transport = _FakeTransport(self.conf)
if hasattr(self, 'ctor_pub_id'):
notifier = oslo_messaging.Notifier(transport,
publisher_id=self.ctor_pub_id)
else:
notifier = oslo_messaging.Notifier(transport)
prepare_kwds = {}
if hasattr(self, 'retry'):
prepare_kwds['retry'] = self.retry
if hasattr(self, 'prep_pub_id'):
prepare_kwds['publisher_id'] = self.prep_pub_id
if prepare_kwds:
notifier = notifier.prepare(**prepare_kwds)
transport._send_notification = mock.Mock()
message_id = uuid.uuid4()
uuid.uuid4 = mock.Mock(return_value=message_id)
mock_utcnow.return_value = datetime.datetime.utcnow()
message = {
'message_id': str(message_id),
'publisher_id': self.expected_pub_id,
'event_type': 'test.notify',
'priority': self.priority.upper(),
'payload': self.payload,
'timestamp': str(timeutils.utcnow()),
}
sends = []
if self.v1:
sends.append(dict(version=1.0))
if self.v2:
sends.append(dict(version=2.0))
calls = []
for send_kwargs in sends:
for topic in self.topics:
if hasattr(self, 'retry'):
send_kwargs['retry'] = self.retry
else:
send_kwargs['retry'] = None
target = oslo_messaging.Target(topic='%s.%s' % (topic,
self.priority))
calls.append(mock.call(target,
self.ctxt,
message,
**send_kwargs))
method = getattr(notifier, self.priority)
method(self.ctxt, 'test.notify', self.payload)
uuid.uuid4.assert_called_once_with()
transport._send_notification.assert_has_calls(calls, any_order=True)
TestMessagingNotifier.generate_scenarios()
class TestSerializer(test_utils.BaseTestCase):
    """Checks that Notifier routes context and payload through a serializer."""

    def setUp(self):
        super(TestSerializer, self).setUp()
        # The 'test' driver records into module-level state; reset afterwards.
        self.addCleanup(_impl_test.reset)

    @mock.patch('oslo_utils.timeutils.utcnow')
    def test_serializer(self, mock_utcnow):
        # The serializer's outputs (not the raw inputs) must appear in the
        # recorded notification.
        transport = _FakeTransport(self.conf)
        serializer = msg_serializer.NoOpSerializer()
        notifier = oslo_messaging.Notifier(transport,
                                           'test.localhost',
                                           driver='test',
                                           topic='test',
                                           serializer=serializer)
        message_id = uuid.uuid4()
        # Pin message id and timestamp so the expected dict below matches.
        uuid.uuid4 = mock.Mock(return_value=message_id)
        mock_utcnow.return_value = datetime.datetime.utcnow()
        serializer.serialize_context = mock.Mock()
        serializer.serialize_context.return_value = dict(user='alice')
        serializer.serialize_entity = mock.Mock()
        serializer.serialize_entity.return_value = 'sbar'
        notifier.info(dict(user='bob'), 'test.notify', 'bar')
        message = {
            'message_id': str(message_id),
            'publisher_id': 'test.localhost',
            'event_type': 'test.notify',
            'priority': 'INFO',
            'payload': 'sbar',
            'timestamp': str(timeutils.utcnow()),
        }
        # Serialized context/payload recorded by the 'test' driver.
        self.assertEqual([(dict(user='alice'), message, 'INFO', None)],
                         _impl_test.NOTIFICATIONS)
        uuid.uuid4.assert_called_once_with()
        # The raw inputs must have been passed to the serializer exactly once.
        serializer.serialize_context.assert_called_once_with(dict(user='bob'))
        serializer.serialize_entity.assert_called_once_with(dict(user='bob'),
                                                            'bar')
class TestNotifierTopics(test_utils.BaseTestCase):
    """Notifier must honour topics from config and from constructor kwargs."""

    def test_topics_from_config(self):
        # Topics listed in the notifications config group feed _topics.
        self.config(driver=['log'],
                    group='oslo_messaging_notifications')
        self.config(topics=['topic1', 'topic2'],
                    group='oslo_messaging_notifications')
        transport = _FakeTransport(self.conf)
        notifier = oslo_messaging.Notifier(transport, 'test.localhost')
        self.assertEqual(['topic1', 'topic2'], notifier._topics)

    def test_topics_from_kwargs(self):
        # A singular 'topic' kwarg becomes a one-element list; an explicit
        # 'topics' list is taken as-is.
        self.config(driver=['log'],
                    group='oslo_messaging_notifications')
        transport = _FakeTransport(self.conf)
        notifier = oslo_messaging.Notifier(transport, 'test.localhost',
                                           topic='topic1')
        self.assertEqual(['topic1'], notifier._topics)
        notifier = oslo_messaging.Notifier(transport, 'test.localhost',
                                           topics=['topic1', 'topic2'])
        self.assertEqual(['topic1', 'topic2'], notifier._topics)
class TestLogNotifier(test_utils.BaseTestCase):
@mock.patch('oslo_utils.timeutils.utcnow')
def test_notifier(self, mock_utcnow):
self.config(driver=['log'],
group='oslo_messaging_notifications')
transport = _FakeTransport(self.conf)
notifier = oslo_messaging.Notifier(transport, 'test.localhost')
message_id = uuid.uuid4()
uuid.uuid4 = mock.Mock()
uuid.uuid4.return_value = message_id
mock_utcnow.return_value = datetime.datetime.utcnow()
message = {
'message_id': str(message_id),
'publisher_id': 'test.localhost',
'event_type': 'test.notify',
'priority': 'INFO',
'payload': 'bar',
'timestamp': str(timeutils.utcnow()),
}
logger = mock.Mock()
logging.getLogger = mock.Mock()
logging.getLogger.return_value = logger
notifier.info({}, 'test.notify', 'bar')
uuid.uuid4.assert_called_once_with()
logging.getLogger.assert_called_once_with('oslo.messaging.'
'notification.test.notify')
logger.info.assert_called_once_with(JsonMessageMatcher(message))
def test_sample_priority(self):
# Ensure logger drops sample-level notifications.
driver = _impl_log.LogDriver(None, None, None)
logger = mock.Mock(spec=logging.getLogger('oslo.messaging.'
'notification.foo'))
logger.sample = None
logging.getLogger = mock.Mock()
logging.getLogger.return_value = logger
msg = {'event_type': 'foo'}
driver.notify(None, msg, "sample", None)
logging.getLogger.assert_called_once_with('oslo.messaging.'
'notification.foo')
def test_mask_passwords(self):
# Ensure that passwords are masked with notifications
driver = _impl_log.LogDriver(None, None, None)
logger = mock.MagicMock()
logger.info = mock.MagicMock()
message = {'password': 'passw0rd', 'event_type': 'foo'}
mask_str = jsonutils.dumps(strutils.mask_dict_password(message))
with mock.patch.object(logging, 'getLogger') as gl:
gl.return_value = logger
driver.notify(None, message, 'info', 0)
logger.info.assert_called_once_with(mask_str)
class TestRoutingNotifier(test_utils.BaseTestCase):
def setUp(self):
super(TestRoutingNotifier, self).setUp()
self.config(driver=['routing'],
group='oslo_messaging_notifications')
transport = _FakeTransport(self.conf)
self.notifier = oslo_messaging.Notifier(transport)
self.router = self.notifier._driver_mgr['routing'].obj
def _fake_extension_manager(self, ext):
return extension.ExtensionManager.make_test_instance(
[extension.Extension('test', None, None, ext), ])
def _empty_extension_manager(self):
return extension.ExtensionManager.make_test_instance([])
def test_should_load_plugin(self):
self.router.used_drivers = set(["zoo", "blah"])
ext = mock.MagicMock()
ext.name = "foo"
self.assertFalse(self.router._should_load_plugin(ext))
ext.name = "zoo"
self.assertTrue(self.router._should_load_plugin(ext))
def test_load_notifiers_no_config(self):
# default routing_config=""
self.router._load_notifiers()
self.assertEqual({}, self.router.routing_groups)
self.assertEqual(0, len(self.router.used_drivers))
def test_load_notifiers_no_extensions(self):
self.config(routing_config="routing_notifier.yaml",
group='oslo_messaging_notifications')
routing_config = r""
config_file = mock.MagicMock()
config_file.return_value = routing_config
with mock.patch.object(self.router, '_get_notifier_config_file',
config_file):
with mock.patch('stevedore.dispatch.DispatchExtensionManager',
return_value=self._empty_extension_manager()):
with mock.patch('oslo_messaging.notify.'
'_impl_routing.LOG') as mylog:
self.router._load_notifiers()
self.assertFalse(mylog.debug.called)
self.assertEqual({}, self.router.routing_groups)
def test_load_notifiers_config(self):
self.config(routing_config="routing_notifier.yaml",
group='oslo_messaging_notifications')
routing_config = r"""
group_1:
rpc : foo
group_2:
rpc : blah
"""
config_file = mock.MagicMock()
config_file.return_value = routing_config
with mock.patch.object(self.router, '_get_notifier_config_file',
config_file):
with mock.patch('stevedore.dispatch.DispatchExtensionManager',
return_value=self._fake_extension_manager(
mock.MagicMock())):
self.router._load_notifiers()
groups = list(self.router.routing_groups.keys())
groups.sort()
self.assertEqual(['group_1', 'group_2'], groups)
def test_get_drivers_for_message_accepted_events(self):
config = r"""
group_1:
rpc:
accepted_events:
- foo.*
- blah.zoo.*
- zip
"""
groups = yaml.safe_load(config)
group = groups['group_1']
# No matching event ...
self.assertEqual([],
self.router._get_drivers_for_message(
group, "unknown", "info"))
# Child of foo ...
self.assertEqual(['rpc'],
self.router._get_drivers_for_message(
group, "foo.1", "info"))
# Foo itself ...
self.assertEqual([],
self.router._get_drivers_for_message(
group, "foo", "info"))
# Child of blah.zoo
self.assertEqual(['rpc'],
self.router._get_drivers_for_message(
group, "blah.zoo.zing", "info"))
def test_get_drivers_for_message_accepted_priorities(self):
config = r"""
group_1:
rpc:
accepted_priorities:
- info
- error
"""
groups = yaml.safe_load(config)
group = groups['group_1']
# No matching priority
self.assertEqual([],
self.router._get_drivers_for_message(
group, None, "unknown"))
# Info ...
self.assertEqual(['rpc'],
self.router._get_drivers_for_message(
group, None, "info"))
# Error (to make sure the list is getting processed) ...
self.assertEqual(['rpc'],
self.router._get_drivers_for_message(
group, None, "error"))
def test_get_drivers_for_message_both(self):
config = r"""
group_1:
rpc:
accepted_priorities:
- info
accepted_events:
- foo.*
driver_1:
accepted_priorities:
- info
driver_2:
accepted_events:
- foo.*
"""
groups = yaml.safe_load(config)
group = groups['group_1']
# Valid event, but no matching priority
self.assertEqual(['driver_2'],
self.router._get_drivers_for_message(
group, 'foo.blah', "unknown"))
# Valid priority, but no matching event
self.assertEqual(['driver_1'],
self.router._get_drivers_for_message(
group, 'unknown', "info"))
# Happy day ...
x = self.router._get_drivers_for_message(group, 'foo.blah', "info")
x.sort()
self.assertEqual(['driver_1', 'driver_2', 'rpc'], x)
def test_filter_func(self):
ext = mock.MagicMock()
ext.name = "rpc"
# Good ...
self.assertTrue(self.router._filter_func(ext, {}, {}, 'info',
None, ['foo', 'rpc']))
# Bad
self.assertFalse(self.router._filter_func(ext, {}, {}, 'info',
None, ['foo']))
def test_notify(self):
self.router.routing_groups = {'group_1': None, 'group_2': None}
drivers_mock = mock.MagicMock()
drivers_mock.side_effect = [['rpc'], ['foo']]
with mock.patch.object(self.router, 'plugin_manager') as pm:
with mock.patch.object(self.router, '_get_drivers_for_message',
drivers_mock):
self.notifier.info({}, 'my_event', {})
self.assertEqual(sorted(['rpc', 'foo']),
sorted(pm.map.call_args[0][6]))
def test_notify_filtered(self):
self.config(routing_config="routing_notifier.yaml",
group='oslo_messaging_notifications')
routing_config = r"""
group_1:
rpc:
accepted_events:
- my_event
rpc2:
accepted_priorities:
- info
bar:
accepted_events:
- nothing
"""
config_file = mock.MagicMock()
config_file.return_value = routing_config
rpc_driver = mock.Mock()
rpc2_driver = mock.Mock()
bar_driver = mock.Mock()
pm = dispatch.DispatchExtensionManager.make_test_instance(
[extension.Extension('rpc', None, None, rpc_driver),
extension.Extension('rpc2', None, None, rpc2_driver),
extension.Extension('bar', None, None, bar_driver)],
)
with mock.patch.object(self.router, '_get_notifier_config_file',
config_file):
with mock.patch('stevedore.dispatch.DispatchExtensionManager',
return_value=pm):
self.notifier.info({}, 'my_event', {})
self.assertFalse(bar_driver.info.called)
rpc_driver.notify.assert_called_once_with(
{}, mock.ANY, 'INFO', None)
rpc2_driver.notify.assert_called_once_with(
{}, mock.ANY, 'INFO', None)
|
ozamiatin/oslo.messaging
|
oslo_messaging/tests/notify/test_notifier.py
|
Python
|
apache-2.0
| 20,524
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*/
package com.osbcp.csssquasher;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.osbcp.cssparser.PropertyValue;
import com.osbcp.cssparser.Rule;
import com.osbcp.cssparser.Selector;
/**
 * Main logic for refactoring rules.
 *
 * Meaning moving property values between rules to minimize the size.
 *
 * @author <a href="mailto:christoffer@christoffer.me">Christoffer Pettersson</a>
 */
class LogicRefactor {

    /** Shared log buffer that refactoring steps append human-readable notes to. */
    private final StringBuilder log;

    /** Maps each property value to the selector lists of every rule that uses it. */
    private Map<PropertyValue, List<List<Selector>>> entries = new LinkedHashMap<PropertyValue, List<List<Selector>>>();

    /** Number of property values that have been refactored into merged rules. */
    private int fixed;

    /**
     * Creates the refactor.
     * @param log For logging.
     */
    public LogicRefactor(final StringBuilder log) {
        this.log = log;
    }

    /**
     * Main logic.
     *
     * Collects every property value shared by two or more rules and, when the
     * combined selector-name length is shorter than the property value text,
     * removes the duplicates and appends a single merged rule instead.
     *
     * @param rules List of rules that should be refactored. NOTE: modified in place.
     * @return The same list instance, with refactored rules appended.
     */
    public List<Rule> refactor(final List<Rule> rules) {
        // Index every property value by the selectors of the rules using it.
        for (Rule rule : rules) {
            for (PropertyValue propertyValue : rule.getPropertyValues()) {
                register(propertyValue, rule.getSelectors());
            }
        }
        for (Entry<PropertyValue, List<List<Selector>>> entry : entries.entrySet()) {
            PropertyValue propertyValueKeyString = entry.getKey();
            List<List<Selector>> selectors = entry.getValue();
            // Only worth merging when at least two rules share the value.
            if (selectors.size() < 2) {
                continue;
            }
            int selectorNameLength = getSelectorNameLength(selectors);
            // Merge only when the merged selector list is cheaper (in
            // characters) than repeating the property value text.
            if (selectorNameLength < propertyValueKeyString.toString().length()) {
                log.append("Refactoring the property '" + propertyValueKeyString + "'.\n");
                fixed++;
                deletePropertyValueFromSelectors(rules, propertyValueKeyString);
                // Flatten the nested selector lists into the new rule's selectors.
                List<Selector> mmm = new ArrayList<Selector>();
                for (List<Selector> ss : selectors) {
                    for (Selector s : ss) {
                        mmm.add(s);
                    }
                }
                Rule rule = new Rule(mmm);
                rule.addPropertyValue(propertyValueKeyString);
                rules.add(rule);
            }
        }
        return rules;
    }

    /**
     * Deletes property values from all rules that may have those values.
     * Rules left without any property values afterwards are removed entirely.
     *
     * @param rules A list of rules to go through.
     * @param propertyValue Property value that should be removed from the rules.
     */
    private void deletePropertyValueFromSelectors(final List<Rule> rules, final PropertyValue propertyValue) {
        final List<Rule> rulesToBeRemoved = new ArrayList<Rule>();
        for (Rule rule : rules) {
            // Iterate over a snapshot so removal doesn't upset the iteration.
            Set<PropertyValue> values = new HashSet<PropertyValue>(rule.getPropertyValues());
            for (PropertyValue pv : values) {
                if (pv.equals(propertyValue)) {
                    rule.removePropertyValue(pv);
                }
            }
            if (rule.getPropertyValues().size() == 0) {
                rulesToBeRemoved.add(rule);
            }
        }
        for (Rule rule : rulesToBeRemoved) {
            rules.remove(rule);
        }
    }

    /**
     * Computes the combined character length of all selector names.
     * (Note: this is the summed text length, not the number of selectors —
     * the previous javadoc was misleading.)
     *
     * @param selectors The nested list of selectors to measure.
     * @return The summed length of all selector names.
     */
    private int getSelectorNameLength(final List<List<Selector>> selectors) {
        int length = 0;
        for (List<Selector> s1 : selectors) {
            for (Selector s2 : s1) {
                length += s2.toString().length();
            }
        }
        return length;
    }

    /**
     * Registers selectors associated with a property value.
     *
     * @param propertyValue The property value.
     * @param selector The selectors that should be registered.
     */
    public void register(final PropertyValue propertyValue, final List<Selector> selector) {
        List<List<Selector>> selectors = entries.get(propertyValue);
        if (selectors == null) {
            // First time this property value is seen.
            selectors = new ArrayList<List<Selector>>();
            entries.put(propertyValue, selectors);
        }
        selectors.add(selector);
    }

    /**
     * Returns the number of refactored rules.
     *
     * @return The number of refactored rules.
     */
    public int getNumberOfRefactored() {
        return fixed;
    }
}
|
corgrath/osbcp-css-squasher
|
src/com/osbcp/csssquasher/LogicRefactor.java
|
Java
|
apache-2.0
| 4,782
|
Given(/^I am on http:\/\/localhost:(\d+)\/$/) do |arg1|
#pending # express the regexp above with the code you wish you had
visit ('http://localhost:7003')
end
When(/^I click on the "(.*?)"$/) do |view|
title=view
page.find(:xpath, "//span[contains(text(),'#{title}')]").click
end
And(/^I click on edit of "(.*?)" $/) do |widgetname|
title = widgetname
page.find(:xpath, "//span[normalize-space(text())='#{title}']/following-sibling::span[3]").click
end
#span[2] = close
#span[3]= edit/settings
And(/^I change the title on pop\-up to "(.*?)"$/) do |arg1|
#pending # express the regexp above with the code you wish you had
fill_in('widgetTitle',:with=>'Example')
end
And(/^I click on ok button$/) do
#pending # express the regexp above with the code you wish you had
page.find(:xpath,"html/body/div[4]/div/div/div[3]/button[2]").click
end
Then(/^I should see the widget (\d+) title change to "(.*?)"$/) do |arg1, arg2|
#pending # express the regexp above with the code you wish you had
expect(page).to have_content(arg2)
end
Then(/^I should see the "(.*?)" on page for National view$/) do |arg1|
#pending # express the regexp above with the code you wish you had
expect(page).to have_content(arg1)
end
When(/^I click on individual "(.*?)"$/) do |arg1|
#page.find(:xpath,"html/body/div[2]/div/ul/li[4]/a/span[1]").click
page.find(:xpath, "//span[contains(text(),'Individual View')]").click
end
Then(/^I should see the widget "(.*?)" on page for Individual view$/) do |arg1|
expect(page).to have_content(arg1)
end
#edit widget
When(/^I click on individual view of "(.*?)"$/) do |arg1|
page.find(:xpath,"html/body/div[2]/div/ul/li[4]/a/span[1]").click
sleep(10)
end
When(/^I click on edit of "(.*?)"$/) do |arg1|
#pending # express the regexp above with the code you wish you had
#page.find(:xpath,"html/body/div[2]/div/div/div/div[2]/div[1]/div/div[1]/h3/span[4]").click
title= 'Veteran Roster by VAMC'
page.find(:xpath, "//span[normalize-space(text())='#{title}']/following-sibling::span[3]").click
end
When(/^I click on "(.*?)" button$/) do |arg1|
#pending # express the regexp above with the code you wish you had
#page.find(:xpath,"html/body/div[4]/div/div/div[3]/button[2]").click
click_button("OK")
end
When(/^I click on save button$/) do
page.find(:xpath,"html/body/div[2]/div/div/div/div[1]/button[2]").click
end
#delete widget
When(/^I click on delete of "(.*?)"$/) do |arg1|
#page.find(:xpath,"html/body/div[2]/div/div/div/div[2]/div[1]/div/div[1]/h3/span[3]").click
title = 'Medication'
page.find(:xpath, "//span[normalize-space(text())='#{title}']/following-sibling::span[2]").click
end
When(/^I click on save changes$/) do
#pending # express the regexp above with the code you wish you had
page.find(:xpath,"html/body/div[2]/div/div/div/div[1]/button[2]").click
end
Then(/^I should not see the widget "(.*?)"$/) do |arg1|
#pending # express the regexp above with the code you wish you had
expect(page).to have_no_content(arg1)
end
Then(/^i should not see the widget "(.*?)"$/) do |arg1|
#pending # express the regexp above with the code you wish you had
expect(page).to have_no_content(arg1)
end
#Add Widget
When(/^I click on add a widget$/) do
page.find(:xpath ,"/html/body/div[2]/div/div/div/div[1]/div/span/button/span").click
#click_button('Add a Wdiget')
end
When(/^I click on random$/) do
page.find(:xpath,"/html/body/div[2]/div/div/div/div[1]/div/span/ul/li[2]/button").click
#click_button("random")
#title='random'
#page.find(:xpath, "//li[normalize-space(text())='#{title}']/following-sibling::li[3]").click
end
# Verify the newly added widget's title is visible on the page.
# Bug fix: the step says "should see" but previously asserted
# have_no_content, which passed exactly when the widget was missing.
Then(/^I should see the new widget with title "(.*?)"$/) do |arg1|
  expect(page).to have_content(arg1)
end
When(/^I Click on edit of widget$/) do
title= 'Widget1'
page.find(:xpath, "//span[normalize-space(text())='#{title}']/following-sibling::span[3]").click
end
When(/^i Change the title on pop\-up to roster "(.*?)"$/) do |arg1|
fill_in('widgetTitle',:with=>'roster')
end
# Verify the widget now shows the updated "roster" title.
# Bug fix: the step says "should see" but previously asserted
# have_no_content, inverting the intended check.
Then(/^I should see widget with title roster "(.*?)"$/) do |arg1|
  expect(page).to have_content(arg1)
end
|
VHAINNOVATIONS/PerceptiveReach
|
CI_CD/Backup/CucumberScripts/SPCWidget/features/step_definitions/SPC_Widget_steps.rb
|
Ruby
|
apache-2.0
| 4,158
|
// Licensed under the Apache License, Version 2.0
package com.flingtap.done;
import com.flingtap.common.HandledException;
import com.flurry.android.FlurryAgent;
import android.app.Dialog;
import android.app.TabActivity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
/**
* A TabActivity which supports the use of participants (parts).
*
* @author spencer
*
*/
public abstract class CoordinatedTabActivity extends com.flingtap.done.android.TabActivity {
private static final String TAG = "CoordinatedActivity";
ParticipantOrganizer partOrg = new ParticipantOrganizer();
protected void addParticipant(ContextActivityParticipant participant){
participant.setIntent(getIntent());
partOrg.addParticipant(participant);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
SessionUtil.onSessionStart(this); // TabActivity is an ActivityGroup and each of the contained Activities will start/stop the session on their own.
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
try{
super.onActivityResult(requestCode, resultCode, data);
partOrg.onActivityResult(requestCode, resultCode, data);
}catch(HandledException h){ // Ignore.
}catch(Exception exp){
Log.e(TAG, "ERR0002C", exp);
ErrorUtil.handleExceptionNotifyUser("ERR0002C", exp, this);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
try{
boolean superResult = super.onCreateOptionsMenu(menu);
boolean partResult = partOrg.onCreateOptionsMenu(menu);
return partResult || superResult;
}catch(HandledException h){ // Ignore.
}catch(Exception exp){
Log.e(TAG, "ERR0002D", exp);
ErrorUtil.handleExceptionNotifyUser("ERR0002D", exp, this);
}
return false;
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
try{
boolean superResult = super.onPrepareOptionsMenu(menu);
boolean partResult = partOrg.onPrepareOptionsMenu(menu);
return partResult || superResult;
}catch(HandledException h){ // Ignore.
}catch(Exception exp){
Log.e(TAG, "ERR0002E", exp);
ErrorUtil.handleExceptionNotifyUser("ERR0002E", exp, this);
}
return false;
}
protected Dialog onCreateDialog(int dialogId) {
try{
Dialog dialog = super.onCreateDialog(dialogId); // TODO: Verify if I need to do this.
if( null == dialog ){
return partOrg.onCreateDialog(dialogId);
}else{
return dialog;
}
}catch(HandledException h){ // Ignore.
}catch(Exception exp){
Log.e(TAG, "ERR0002F", exp);
ErrorUtil.handleExceptionNotifyUser("ERR0002F", exp, this);
}
return null;
}
protected void onPrepareDialog(int dialogId, Dialog dialog){
try{
super.onPrepareDialog(dialogId, dialog);
partOrg.onPrepareDialog(dialogId, dialog);
}catch(HandledException h){ // Ignore.
}catch(Exception exp){
Log.e(TAG, "ERR0002G", exp);
ErrorUtil.handleExceptionNotifyUser("ERR0002G", exp, this);
}
}
@Override
protected void onResume() {
super.onResume();
SessionUtil.onSessionStart(this); // TabActivity is an ActivityGroup and each of the contained Activities will start/stop the session on their own.
}
@Override
protected void onPause() {
super.onPause();
SessionUtil.onSessionStop(this); // TabActivity is an ActivityGroup and each of the contained Activities will start/stop the session on their own.
}
public void onSaveInstanceState(Bundle outState){
super.onSaveInstanceState(outState);
try{
partOrg.onSaveInstanceState(outState);
}catch(HandledException h){ // Ignore.
}catch(Exception exp){
Log.e(TAG, "ERR000ED", exp);
ErrorUtil.handleExceptionNotifyUser("ERR000ED", exp, this);
}
}
public void onRestoreInstanceState(Bundle savedInstanceState){
super.onRestoreInstanceState(savedInstanceState);
try{
partOrg.onRestoreInstanceState(savedInstanceState);
}catch(HandledException h){ // Ignore.
}catch(Exception exp){
Log.e(TAG, "ERR000EE", exp);
ErrorUtil.handleExceptionNotifyUser("ERR000EE", exp, this);
}
}
protected void onDestroy() {
super.onDestroy();
partOrg.onDestroy();
}
}
|
SpencerRiddering/flingtap-done
|
flingtap-done/src/main/java/com/flingtap/done/CoordinatedTabActivity.java
|
Java
|
apache-2.0
| 4,194
|
<?php
declare(strict_types=1);
namespace Gdbots\Pbjx\DependencyInjection;
/**
 * Marker interface for pbjx projectors.
 *
 * Declares no methods; implementations are identified by type only
 * (presumably for service discovery/tagging in the DI container —
 * confirm against the container configuration).
 */
interface PbjxProjector
{
}
|
gdbots/pbjx-php
|
src/DependencyInjection/PbjxProjector.php
|
PHP
|
apache-2.0
| 148
|
<div class="proj-container">
  <p>In an effort to increase product adoption at the end-user level, I designed a series of user-focused microsites to showcase top use cases for kiteworks end users. The concept behind the mockup design is to limit the amount of clicking and scrolling needed to cover useful information, including top uses, tutorial videos, FAQ, and links sections.</p>
<img src="assets/img/portfolio/proj-4/img1.jpg" alt="Microsite">
<img src="assets/img/portfolio/proj-4/img2.jpg" alt="Microsite">
<img src="assets/img/portfolio/proj-4/img3.jpg" alt="Microsite">
<img src="assets/img/portfolio/proj-4/img4.jpg" alt="Microsite">
<img src="assets/img/portfolio/proj-4/img5.jpg" alt="Microsite">
</div>
|
dary410/dary410.github.io
|
_includes/portfolio/proj-4.html
|
HTML
|
apache-2.0
| 718
|
{
graphitePort: 2003
, graphiteHost: "{{ inventory_hostname }}"
, port: 8125
, backends: [ "./backends/graphite" ]
, graphite: {
legacyNamespace: false
}
}
|
invokelabs/invokelabs-ansible-roles
|
stats/templates/localConfig.js
|
JavaScript
|
apache-2.0
| 160
|
# Appianoporites S.Y. Sm., Currah & Stockey GENUS
#### Status
ACCEPTED
#### According to
Index Fungorum
#### Published in
Mycologia 96(1): 181 (2004)
#### Original name
Appianoporites S.Y. Sm., Currah & Stockey
### Remarks
null
|
mdoering/backbone
|
life/Fungi/Appianoporites/README.md
|
Markdown
|
apache-2.0
| 232
|
/*---------------------------------------------------------------------------------------------*
* Copyright (C) 2012 Daniel Bolaños - www.bltek.com - Boulder Language Technologies *
* *
* www.bavieca.org is the website of the Bavieca Speech Recognition Toolkit *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
*---------------------------------------------------------------------------------------------*/
#ifndef LMFSM_H
#define LMFSM_H
using namespace std;
#if defined __linux__ || defined __APPLE__ || __MINGW32__
#include <tr1/unordered_map>
#elif _MSC_VER
#include <hash_map>
#else
#error "unsupported platform"
#endif
#include <list>
#include <string>
#include <vector>
#include "LexiconManager.h"
#include "LMARPA.h"
#include "LogMessage.h"
namespace Bavieca {
// forward declaration of the temporary arc type used while building the FSM
struct _LMArcTemp;

typedef vector<_LMArcTemp*> VLMArcTemp;
typedef list<_LMArcTemp*> LLMArcTemp;

// sentinel lexical-unit id for back-off arcs, which connect a state to its back-off state
// NOTE(review): relies on INT_MAX being available via one of the included headers — confirm <climits> is pulled in transitively
#define BACKOFF_ARC INT_MAX

// temporary language model state used during construction
// (each state is connected to other states via epsilon or a word)
typedef struct {
	int iState;          // state index
	LLMArcTemp lArc;     // outgoing arcs
	//NGram *ngram;
} LMStateTemp;

typedef list<LMStateTemp*> LLMStateTemp;

// temporary language model arc (keeps the score)
typedef struct _LMArcTemp {
	int iLexUnit;            // lexical unit
	float fScore;            // language model score
	LMStateTemp *stateDest;  // destination state
} LMArcTemp;

// final (compacted) language model state; arcs are stored in one contiguous
// array, so a state only needs the index of its first outgoing arc
typedef struct {
	int iArcBase;	// index of the first outgoing arc
} LMState;

// final (compacted) language model arc (keeps the score)
typedef struct {
	int iLexUnit;		// lexical unit
	float fScore;		// language model score
	int iStateDest;		// index of the destination state
} LMArc;

// map from an n-gram history hash key to its temporary state
#if defined __linux__ || defined __APPLE__ || __MINGW32__
typedef std::tr1::unordered_map<string,LMStateTemp*> MNGramState;
#elif _MSC_VER
typedef std::unordered_map<string,LMStateTemp*> MNGramState;
#endif
/**
@author root <dani.bolanos@gmail.com>
*/
/**
	Language model compiled into a finite state machine for fast lookups.
	Built from an ARPA-format model (LMARPA); states and arcs are stored in
	contiguous arrays for better memory locality.

	@author root <dani.bolanos@gmail.com>
*/
class LMFSM {

	private:

		LexiconManager *m_lexiconManager;   // maps lexical units to indices
		bool m_bLoaded;                     // whether a model is currently loaded

		// lm properties
		int m_iNGramOrder;                  // n-gram order of the model
		int *m_iNGrams;                     // number of n-grams per order

		// language model states and arcs
		int m_iLMStateInitial;              // index of the initial state
		int m_iLMStateFinal;                // index of the final state
		LMState *m_states;
		LMArc *m_arcs;
		int m_iStates;
		int m_iArcs;
		int m_iArcsStandard;                // arcs labeled with a lexical unit
		int m_iArcsBackoff;                 // back-off arcs

		// compare two arcs by lexical unit index (used to sort outgoing arcs)
		static bool compareArcs(const LMArcTemp *arc1, const LMArcTemp *arc2) {

			return (arc1->iLexUnit < arc2->iLexUnit);
		}

		// return a new temporary language model arc, updating the standard/back-off arc counters
		LMArcTemp *newArc(int iLexUnit, float fScore, LMStateTemp *stateDest) {

			LMArcTemp *arc = new LMArcTemp;
			arc->iLexUnit = iLexUnit;
			arc->fScore = fScore;
			arc->stateDest = stateDest;
			if (iLexUnit != BACKOFF_ARC) {
				++m_iArcsStandard;
			} else {
				++m_iArcsBackoff;
			}

			return arc;
		}

		// compute a hash key identifying an n-gram history: a '#' back-off
		// marker followed by the raw bytes of the lexical-unit indices along
		// the n-gram chain (optionally prefixed by iLexUnitInit)
		// NOTE(review): with iLexUnitInit set, i can approach iLen for a
		// full-depth history; the assert below guards the buffer bound —
		// confirm callers keep histories within the n-gram order
		string hashKey(NGram *ngram, bool bIgnoreLower = false, int iLexUnitInit = -1) {

			int iLen = sizeof(int)*(m_iNGramOrder-1)+2;
			char *strKey = new char[iLen];
			strKey[0] = '#';	// for the backoff
			int i=1;
			if (iLexUnitInit != -1) {
				memcpy(strKey+i,&iLexUnitInit,sizeof(int));
				i+=sizeof(int);
			}
			assert(ngram);
			NGram *ngramAux = ngram;
			// walk down the history chain, copying each lexical-unit index
			while(ngramAux->ngramBase && (!bIgnoreLower || ngramAux->ngramBase->ngramBase)) {
				memcpy(strKey+i,&ngramAux->iLexUnit,sizeof(int));
				i+=sizeof(int);
				ngramAux = ngramAux->ngramBase;
			}
			strKey[i] = 0;
			assert(i<iLen);
			string str(strKey,i);
			delete [] strKey;

			return str;
		}

	public:

		//constructor
		LMFSM(LexiconManager *lexiconManager);

		// destructor
		~LMFSM();

		// return whether the lm is loaded
		bool lmLoaded() {

			return m_bLoaded;
		}

		// build the FSM
		void build(LMARPA *lmARPA);

		// perform sanity checks to make sure that all the states/transitions created are connected
		void checkConnected(LMStateTemp *stateInitial, LMStateTemp *stateFinal,
			LMStateTemp *stateBackoffZerogram, int iStates, int iArcs);

		// compact the FSM to use less memory and speed-up lookups (better locality)
		void compact(LMStateTemp *states, int iStates, int iArcs, LMStateTemp *stateInitial, LMStateTemp *stateFinal);

		// store to disk
		void store(const char *strFile);

		// load from disk
		void load(const char *strFile);

		// get the initial state
		int getInitialState();

		// update the language model state with the given lexical unit and returns the new lm state
		int updateLMState(int iLMStatePrev, int iLexUnit, float *fScore);

		// return the score resulting from moving to the given lm-state to the final state
		float toFinalState(int iLMState);

		// return language model scores for all words in the vocabulary for a given LM-state (word history)
		// typically used for language model look-ahead
		void getLMScores(int iLMState, float *fLMScores, int iVocabularySize);

		// compute the likelihood of the given sequence of word
		float computeLikelihood(const char *str);

		// return the n-gram order
		int getNGramOrder() {

			return m_iNGramOrder;
		}

		// print
		void print();
};
}; // end-of-namespace
#endif
|
nlphacker/bavieca
|
src/common/base/LMFSM.h
|
C
|
apache-2.0
| 6,432
|
from __future__ import absolute_import
import json
from changes.config import db
from changes.constants import Result
from changes.models.jobplan import JobPlan
from changes.utils.http import build_web_uri
from .base import ArtifactHandler, ArtifactParseError
class CollectionArtifactHandler(ArtifactHandler):
    """
    Base class artifact handler for collection (jobs.json and tests.json) files.

    Does the required job expansion. Subclasses are expected to set
    cls.FILENAMES to the handleable files in question.
    """

    def _build_uri(self):
        """Return a web URI pointing at this step's build, for log messages."""
        return build_web_uri('/find_build/{0}/'.format(self.step.job.build_id.hex))

    def process(self, fp, artifact):
        """Parse the collection JSON in ``fp`` and expand jobs from it.

        Malformed JSON or malformed artifact contents are reported via
        ``report_malformed``; any other expansion failure marks the step as
        an infrastructure failure and persists that result.
        """
        try:
            phase_config = json.load(fp)
        except ValueError:
            # Not valid JSON at all.
            self.logger.warning('Failed to parse json; (step=%s, build=%s)',
                                self.step.id.hex, self._build_uri(), exc_info=True)
            self.report_malformed()
        else:
            _, implementation = JobPlan.get_build_step_for_job(job_id=self.step.job_id)
            try:
                implementation.expand_jobs(self.step, phase_config)
            except ArtifactParseError:
                # Valid JSON, but not the structure expand_jobs expects.
                self.logger.warning('malformed %s artifact (step=%s, build=%s)', self.FILENAMES[0],
                                    self.step.id.hex, self._build_uri(), exc_info=True)
                self.report_malformed()
            except Exception:
                # Unexpected failure: blame infrastructure rather than the artifact.
                self.logger.warning('expand_jobs failed (step=%s, build=%s)',
                                    self.step.id.hex, self._build_uri(), exc_info=True)
                self.step.result = Result.infra_failed
                db.session.add(self.step)
                db.session.commit()
class TestsJsonHandler(CollectionArtifactHandler):
    # Only match tests.json at the artifact root: the leading '/' anchors the
    # path so nested tests.json files are ignored.
    FILENAMES = ('/tests.json',)
|
dropbox/changes
|
changes/artifacts/collection_artifact.py
|
Python
|
apache-2.0
| 1,917
|
package com.samsung.spensdk.example.tools;
import java.io.File;
import com.loftcat.R;
import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.view.Window;
import android.widget.TextView;
import android.content.Intent;
/**
 * Activity that displays a file-chooser list and returns the selected file's
 * absolute path to the caller via the activity result.
 *
 * <p>Input extras: {@code EXTRA_LIST_PATH} (directory to list; defaults to the
 * external storage root), {@code EXTRA_FILE_EXT_ARRAY} (file-extension filter)
 * and {@code EXTRA_SEARCH_ONLY_SAMM_FILE} (restrict to SAMM files, default
 * {@code true}). Output extra: {@code EXTRA_SELECTED_FILE}.
 */
public class ToolListActivity extends Activity {
    static public final String EXTRA_LIST_PATH = "ExtraListPath";
    static public final String EXTRA_FILE_EXT_ARRAY = "ExtraFileExts";
    static public final String EXTRA_SELECTED_FILE = "ExtraSelectedPath";
    static public final String EXTRA_SEARCH_ONLY_SAMM_FILE = "ExtraSearchOnlySammFile";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.tool_file_list);

        ToolFileListView fListView = (ToolFileListView) findViewById(R.id.fileListView);
        Intent intent = getIntent();

        // Directory to search; fall back to external storage when the caller
        // did not provide one.
        String strPath = intent.getStringExtra(EXTRA_LIST_PATH);
        if (strPath == null) {
            File sdcardPath = Environment.getExternalStorageDirectory();
            strPath = sdcardPath.getPath();
        }

        // Extension filter and whether to list only SAMM files.
        String[] exts = intent.getStringArrayExtra(EXTRA_FILE_EXT_ARRAY);
        boolean bSearchOnlySammFile = intent.getBooleanExtra(EXTRA_SEARCH_ONLY_SAMM_FILE, true);

        // Populate the list view by searching normal files or SAMM files.
        fListView.setFilePath(strPath, exts, bSearchOnlySammFile);
        fListView.setOnFileSelectedListener(mFileSelectedListener);

        // Status line showing how many files were found.
        TextView statusTextView = (TextView) findViewById(R.id.statusTitle);
        String status;
        if (fListView.isEmpty()) {
            status = "File not Found";
        } else {
            // Cache the count and reuse it instead of querying the view twice.
            int count = fListView.getListCount();
            if (count == 1) {
                status = "Total (1) File";
            } else {
                status = "Total (" + count + ") Files";
            }
        }
        statusTextView.setText(status);

        fListView.setFocusable(true);
        fListView.setFocusableInTouchMode(true);
    }

    /** Hands the selected file's full path back to the caller and finishes. */
    private OnFileSelectedListener mFileSelectedListener = new OnFileSelectedListener() {
        @Override
        public void onSelected(String path, String fileName) {
            Intent intent = getIntent();
            String strSelectFileName = path + fileName;
            intent.putExtra(EXTRA_SELECTED_FILE, strSelectFileName);
            setResult(RESULT_OK, intent);
            finish();
        }
    };
}
|
loftcat/WeiCat
|
src/com/samsung/spensdk/example/tools/ToolListActivity.java
|
Java
|
apache-2.0
| 2,387
|
/*
Copyright 2017 WALLIX
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package graph
import (
"errors"
"fmt"
"reflect"
"regexp"
"strings"
"unicode"
"github.com/wallix/awless/cloud"
"github.com/wallix/awless/cloud/properties"
"github.com/wallix/awless/cloud/rdf"
tstore "github.com/wallix/triplestore"
)
// Resource is an in-memory representation of a cloud resource: a typed,
// identified node carrying arbitrary properties, free-form metadata and
// typed relations to other resources.
type Resource struct {
	kind, id   string
	properties map[string]interface{}
	relations  map[string][]*Resource
	meta       map[string]interface{}
}

// notFoundResourceType marks placeholder resources built for unresolved ids.
const notFoundResourceType = "notfound"
// NotFoundResource builds a placeholder resource of type "notfound" for an
// id that could not be resolved, with empty property/meta/relation maps.
func NotFoundResource(id string) *Resource {
	r := &Resource{id: id, kind: notFoundResourceType}
	r.properties = make(map[string]interface{})
	r.meta = make(map[string]interface{})
	r.relations = make(map[string][]*Resource)
	return r
}
// InitResource builds an empty resource of the given kind, seeding the
// property map with the resource id.
func InitResource(kind, id string) *Resource {
	r := &Resource{id: id, kind: kind}
	r.properties = map[string]interface{}{properties.ID: id}
	r.meta = make(map[string]interface{})
	r.relations = make(map[string][]*Resource)
	return r
}
// String renders the resource as "name[type]", tolerating a nil receiver.
func (res *Resource) String() string {
	r := res
	if r == nil {
		r = &Resource{}
	}
	return r.Format("%n[%t]")
}

// layoutRegex matches layout verbs of the form %v or %[key]v.
var (
	layoutRegex = regexp.MustCompile("%(\\[(\\w+)\\])?(\\w)")
)
// Format renders the resource according to layout, replacing verbs of the
// form %v or %[key]v. Supported verbs: %i (id, "<none>" if empty), %t (type,
// "<not-found>" for placeholder resources), %n (the Name property prefixed
// with '@', falling back to the id) and %[key]p (arbitrary property by key).
// An unknown verb makes the whole result an error message.
func (res *Resource) Format(layout string) (out string) {
	out = layout
	if matches := layoutRegex.FindAllStringSubmatch(layout, -1); matches != nil {
		for _, match := range matches {
			var val string
			// the verb letter is the last capture group of layoutRegex
			verb := match[len(match)-1]
			switch verb {
			case "i":
				val = "<none>"
				if id := res.Id(); id != "" {
					val = id
				}
			case "t":
				switch {
				case res.Type() == notFoundResourceType:
					val = "<not-found>"
				case res.Type() != "":
					val = res.Type()
				default:
					val = "<none>"
				}
			case "n":
				val = res.Id()
				if name, ok := res.properties[properties.Name].(string); ok && name != "" {
					val = "@" + name
				}
			case "p":
				// %[key]p: match[2] holds the bracketed property key
				if v, ok := res.properties[match[2]]; ok {
					val = fmt.Sprint(v)
				}
			default:
				return fmt.Sprintf("invalid verb '%s' in layout '%s'", verb, layout)
			}
			// replace the first remaining occurrence; matches are consumed in order
			out = strings.Replace(out, match[0], val, 1)
		}
	}
	return
}
// Type returns the resource kind (e.g. "instance").
func (res *Resource) Type() string {
	return res.kind
}

// Id returns the resource identifier.
func (res *Resource) Id() string {
	return res.id
}

// Properties exposes the underlying property map (not a copy).
func (res *Resource) Properties() map[string]interface{} {
	return res.properties
}

// Property looks up a single property value by key.
func (res *Resource) Property(k string) (interface{}, bool) {
	v, ok := res.properties[k]
	return v, ok
}

// Meta looks up a metadata entry by key.
func (res *Resource) Meta(k string) (interface{}, bool) {
	v, ok := res.meta[k]
	return v, ok
}

// SetProperty stores v under k, overwriting any previous value.
func (res *Resource) SetProperty(k string, v interface{}) {
	res.properties[k] = v
}

// AddRelation appends rel to the resources related through typ.
func (res *Resource) AddRelation(typ string, rel *Resource) {
	res.relations[typ] = append(res.relations[typ], rel)
}
// Same reports whether both resources share the same id and type; properties
// and meta are deliberately ignored. Two nil resources compare as same.
func (res *Resource) Same(other cloud.Resource) bool {
	switch {
	case res == nil && other == nil:
		return true
	case res == nil, other == nil:
		return false
	default:
		return res.Id() == other.Id() && res.Type() == other.Type()
	}
}
// marshalFullRDF serializes the resource (rdf:type, the "diff" meta entry and
// every non-nil property) into RDF triples. Property ids, defining classes
// and data types are resolved through rdf.Properties; list-valued properties
// emit one triple per element, with structured elements (rules, routes,
// grants, key/values, origins) stored under fresh random RDF ids.
//
// Fix: removed a stray duplicate empty `case rdf.Grant:` that shadowed
// nothing (the Grant case above already handles it) and only obscured the
// switch's default error path.
func (res *Resource) marshalFullRDF() ([]tstore.Triple, error) {
	var triples []tstore.Triple
	cloudType := namespacedResourceType(res.Type())
	triples = append(triples, tstore.SubjPred(res.id, rdf.RdfType).Resource(cloudType))

	// only the "diff" meta entry is persisted
	for key, value := range res.meta {
		if key == "diff" {
			triples = append(triples, tstore.SubjPred(res.id, MetaPredicate).StringLiteral(fmt.Sprint(value)))
		}
	}

	for key, value := range res.properties {
		if value == nil {
			continue
		}
		propId, err := rdf.Properties.GetRDFId(key)
		if err != nil {
			return triples, fmt.Errorf("resource %s: marshalling property: %s", res, err)
		}
		propType, err := rdf.Properties.GetDefinedBy(propId)
		if err != nil {
			return triples, fmt.Errorf("resource %s: marshalling property: %s", res, err)
		}
		dataType, err := rdf.Properties.GetDataType(propId)
		if err != nil {
			return triples, fmt.Errorf("resource %s: marshalling property: %s", res, err)
		}
		switch propType {
		case rdf.RdfsLiteral, rdf.RdfsClass:
			// scalar property: a single literal or resource triple
			obj, err := marshalToRdfObject(value, propType, dataType)
			if err != nil {
				return triples, fmt.Errorf("resource %s: marshalling property '%s': %s", res, key, err)
			}
			triples = append(triples, tstore.SubjPred(res.Id(), propId).Object(obj))
		case rdf.RdfsList:
			// list property: one triple per element, dispatched on the element type
			switch dataType {
			case rdf.XsdString:
				list, ok := value.([]string)
				if !ok {
					return triples, fmt.Errorf("resource %s: marshalling property '%s': expected a string slice, got a %T", res, key, value)
				}
				for _, l := range list {
					triples = append(triples, tstore.SubjPred(res.id, propId).StringLiteral(l))
				}
			case rdf.RdfsClass:
				list, ok := value.([]string)
				if !ok {
					return triples, fmt.Errorf("resource %s: marshalling property '%s': expected a string slice, got a %T", res, key, value)
				}
				for _, l := range list {
					triples = append(triples, tstore.SubjPred(res.id, propId).Resource(l))
				}
			case rdf.NetFirewallRule:
				list, ok := value.([]*FirewallRule)
				if !ok {
					return triples, fmt.Errorf("resource %s: marshalling property '%s': expected a firewall rule slice, got a %T", res, key, value)
				}
				for _, r := range list {
					ruleId := randomRdfId()
					triples = append(triples, tstore.SubjPred(res.id, propId).Resource(ruleId))
					triples = append(triples, r.marshalToTriples(ruleId)...)
				}
			case rdf.NetRoute:
				list, ok := value.([]*Route)
				if !ok {
					return triples, fmt.Errorf("resource %s: marshalling property '%s': expected a route slice, got a %T", res, key, value)
				}
				for _, r := range list {
					routeId := randomRdfId()
					triples = append(triples, tstore.SubjPred(res.id, propId).Resource(routeId))
					triples = append(triples, r.marshalToTriples(routeId)...)
				}
			case rdf.Grant:
				list, ok := value.([]*Grant)
				if !ok {
					return triples, fmt.Errorf("resource %s: marshalling property '%s': expected a grant slice, got a %T", res, key, value)
				}
				for _, g := range list {
					grantId := randomRdfId()
					triples = append(triples, tstore.SubjPred(res.id, propId).Resource(grantId))
					triples = append(triples, g.marshalToTriples(grantId)...)
				}
			case rdf.KeyValue:
				list, ok := value.([]*KeyValue)
				if !ok {
					return triples, fmt.Errorf("resource %s: marshalling property '%s': expected a keyvalue slice, got a %T", res, key, value)
				}
				for _, kv := range list {
					keyValId := randomRdfId()
					triples = append(triples, tstore.SubjPred(res.id, propId).Resource(keyValId))
					triples = append(triples, kv.marshalToTriples(keyValId)...)
				}
			case rdf.DistributionOrigin:
				list, ok := value.([]*DistributionOrigin)
				if !ok {
					return triples, fmt.Errorf("resource %s: marshalling property '%s': expected a distribution origin slice, got a %T", res, key, value)
				}
				for _, o := range list {
					keyValId := randomRdfId()
					triples = append(triples, tstore.SubjPred(res.id, propId).Resource(keyValId))
					triples = append(triples, o.marshalToTriples(keyValId)...)
				}
			default:
				return triples, fmt.Errorf("resource %s: marshalling property '%s': unexpected rdfs:DataType: %s", res, key, dataType)
			}
		default:
			return triples, fmt.Errorf("resource %s: marshalling property '%s': unexpected rdfs:isDefinedBy: %s", res, key, propType)
		}
	}
	return triples, nil
}
// marshalToRdfObject converts a scalar property value to a triplestore
// object: a typed literal for rdfs:Literal, a resource identifier for
// rdfs:Class, and an error otherwise.
func marshalToRdfObject(i interface{}, definedBy, dataType string) (tstore.Object, error) {
	if definedBy == rdf.RdfsLiteral {
		return tstore.ObjectLiteral(i)
	}
	if definedBy == rdf.RdfsClass {
		return tstore.Resource(fmt.Sprint(i)), nil
	}
	return nil, fmt.Errorf("unexpected rdfs:isDefinedBy: %s", definedBy)
}
// unmarshalFullRdf populates the resource's properties from the graph's
// triples for this subject. The graph must contain a matching rdf:type
// triple. List-valued properties are accumulated element by element into
// typed slices, dispatched on the property's rdfs:DataType.
func (res *Resource) unmarshalFullRdf(gph tstore.RDFGraph) error {
	cloudType := namespacedResourceType(res.Type())
	if !gph.Contains(tstore.SubjPred(res.Id(), rdf.RdfType).Resource(cloudType)) {
		return fmt.Errorf("triple <%s><%s><%s> not found in graph", res.Id(), rdf.RdfType, cloudType)
	}
	for _, t := range gph.WithSubject(res.Id()) {
		pred := t.Predicate()
		// skip predicates that are not plain RDF properties; sub-properties
		// are resolved through their parent property
		if !rdf.Properties.IsRDFProperty(pred) || rdf.Properties.IsRDFSubProperty(pred) {
			continue
		}
		propKey, err := rdf.Properties.GetLabel(pred)
		if err != nil {
			return fmt.Errorf("unmarshalling property: label: %s", err)
		}
		propVal, err := getPropertyValue(gph, t.Object(), pred)
		if err != nil {
			return fmt.Errorf("unmarshalling property '%s' of resource '%s': %s", propKey, res.Id(), err)
		}
		if rdf.Properties.IsRDFList(pred) {
			dataType, err := rdf.Properties.GetDataType(pred)
			if err != nil {
				return fmt.Errorf("unmarshalling property: datatype: %s", err)
			}
			// append to the existing typed slice, creating it on first sight
			switch dataType {
			case rdf.RdfsClass, rdf.XsdString:
				list, ok := res.properties[propKey].([]string)
				if !ok {
					list = []string{}
				}
				list = append(list, propVal.(string))
				res.properties[propKey] = list
			case rdf.NetFirewallRule:
				list, ok := res.properties[propKey].([]*FirewallRule)
				if !ok {
					list = []*FirewallRule{}
				}
				list = append(list, propVal.(*FirewallRule))
				res.properties[propKey] = list
			case rdf.NetRoute:
				list, ok := res.properties[propKey].([]*Route)
				if !ok {
					list = []*Route{}
				}
				list = append(list, propVal.(*Route))
				res.properties[propKey] = list
			case rdf.Grant:
				list, ok := res.properties[propKey].([]*Grant)
				if !ok {
					list = []*Grant{}
				}
				list = append(list, propVal.(*Grant))
				res.properties[propKey] = list
			case rdf.KeyValue:
				list, ok := res.properties[propKey].([]*KeyValue)
				if !ok {
					list = []*KeyValue{}
				}
				list = append(list, propVal.(*KeyValue))
				res.properties[propKey] = list
			case rdf.DistributionOrigin:
				list, ok := res.properties[propKey].([]*DistributionOrigin)
				if !ok {
					list = []*DistributionOrigin{}
				}
				list = append(list, propVal.(*DistributionOrigin))
				res.properties[propKey] = list
			default:
				return fmt.Errorf("unmarshalling property: unexpected datatype %s", dataType)
			}
		} else {
			// scalar property: last triple wins
			res.properties[propKey] = propVal
		}
	}
	return nil
}
// unmarshalMeta restores the "diff" meta entry from the graph's meta triples
// for this resource.
func (r *Resource) unmarshalMeta(gph tstore.RDFGraph) error {
	for _, triple := range gph.WithSubjPred(r.Id(), MetaPredicate) {
		text, parseErr := tstore.ParseString(triple.Object())
		if parseErr != nil {
			return parseErr
		}
		r.meta["diff"] = text
	}
	return nil
}
// namespacedResourceType prefixes the title-cased resource type with the
// cloud OWL namespace (e.g. "instance" -> "<ns>:Instance").
func namespacedResourceType(typ string) string {
	return rdf.CloudOwlNS + ":" + strings.Title(typ)
}

// Resources is a convenience slice of *Resource.
type Resources []*Resource

// Map applies f to every resource and collects the results in order.
func (res Resources) Map(f func(*Resource) string) (out []string) {
	for i := range res {
		out = append(out, f(res[i]))
	}
	return
}
// Subtract returns the entries of one that are absent from other or whose
// values differ (deep equality). Neither input map is modified.
func Subtract(one, other map[string]interface{}) map[string]interface{} {
	diff := make(map[string]interface{})
	for k, v := range one {
		if otherV, present := other[k]; present && reflect.DeepEqual(v, otherV) {
			continue
		}
		diff[k] = v
	}
	return diff
}
// errTypeNotFound is returned when a subject carries no rdf:type triple.
var errTypeNotFound = errors.New("resource type not found")

// resolveResourceType returns the unique resource type of the subject id in
// the graph; errTypeNotFound when absent, or an error when multiple rdf:type
// triples make the type ambiguous.
func resolveResourceType(g tstore.RDFGraph, id string) (string, error) {
	typeTs := g.WithSubjPred(id, rdf.RdfType)
	switch len(typeTs) {
	case 0:
		return "", errTypeNotFound
	case 1:
		return unmarshalResourceType(typeTs[0].Object())
	default:
		return "", fmt.Errorf("cannot resolve unique type for resource '%s', got: %v", id, typeTs)
	}
}
// lowerFirstLetter returns s with its first rune lower-cased. The empty
// string is returned unchanged (the previous version panicked on "" by
// indexing an empty rune slice).
func lowerFirstLetter(s string) string {
	if s == "" {
		return s
	}
	a := []rune(s)
	a[0] = unicode.ToLower(a[0])
	return string(a)
}
// unmarshalResourceType extracts a resource type from an RDF object by
// stripping the namespace prefix and lower-casing the first letter.
func unmarshalResourceType(obj tstore.Object) (string, error) {
	node, isResource := obj.Resource()
	if !isResource {
		return "", fmt.Errorf("object is not a resource identifier, %v", obj)
	}
	typ := trimNS(node)
	return lowerFirstLetter(typ), nil
}
// trimNS returns the portion of s after the last ':' (the local name of a
// namespaced identifier); strings without a ':' are returned unchanged.
// strings.Split always yields at least one element, so indexing is safe.
func trimNS(s string) string {
	parts := strings.Split(s, ":")
	return parts[len(parts)-1]
}
|
wallix/awless
|
graph/resource.go
|
GO
|
apache-2.0
| 12,290
|
# p_crossfit
Web Site da Crossfit
|
caiodesigner/p_crossfit
|
README.md
|
Markdown
|
apache-2.0
| 34
|
# Convolvulus trinervius Thunb. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Solanales/Convolvulaceae/Convolvulus/Convolvulus trinervius/README.md
|
Markdown
|
apache-2.0
| 179
|
// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201509.extensions;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.v201509.cm.ApiException;
import com.google.api.ads.adwords.axis.v201509.cm.CampaignExtensionSetting;
import com.google.api.ads.adwords.axis.v201509.cm.CampaignExtensionSettingOperation;
import com.google.api.ads.adwords.axis.v201509.cm.CampaignExtensionSettingReturnValue;
import com.google.api.ads.adwords.axis.v201509.cm.CampaignExtensionSettingServiceInterface;
import com.google.api.ads.adwords.axis.v201509.cm.DayOfWeek;
import com.google.api.ads.adwords.axis.v201509.cm.ExtensionFeedItem;
import com.google.api.ads.adwords.axis.v201509.cm.ExtensionSetting;
import com.google.api.ads.adwords.axis.v201509.cm.FeedItemDevicePreference;
import com.google.api.ads.adwords.axis.v201509.cm.FeedItemSchedule;
import com.google.api.ads.adwords.axis.v201509.cm.FeedItemScheduling;
import com.google.api.ads.adwords.axis.v201509.cm.FeedType;
import com.google.api.ads.adwords.axis.v201509.cm.MinuteOfHour;
import com.google.api.ads.adwords.axis.v201509.cm.Operator;
import com.google.api.ads.adwords.axis.v201509.cm.SitelinkFeedItem;
import com.google.api.ads.adwords.axis.v201509.cm.UrlList;
import com.google.api.ads.adwords.axis.v201509.mcm.Customer;
import com.google.api.ads.adwords.axis.v201509.mcm.CustomerServiceInterface;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.client.auth.oauth2.Credential;
import adwords.axis.v201509.basicoperations.AddCampaigns;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.rmi.RemoteException;
/**
 * This example adds sitelinks to a campaign. To create a campaign, run {@link AddCampaigns}.
 *
 * <p>Credentials and properties in {@code fromFile()} are pulled from the
 * "ads.properties" file. See README for more info.
 */
public class AddSiteLinks {

  public static void main(String[] args) throws Exception {
    // Generate a refreshable OAuth2 credential that can be used in place of a
    // service account.
    Credential oAuth2Credential = new OfflineCredentials.Builder().forApi(Api.ADWORDS).fromFile()
        .build().generateCredential();

    // Construct an AdWordsSession.
    AdWordsSession session =
        new AdWordsSession.Builder().fromFile().withOAuth2Credential(oAuth2Credential).build();

    AdWordsServices adWordsServices = new AdWordsServices();

    Long campaignId = Long.valueOf("INSERT_CAMPAIGN_ID_HERE");

    runExample(adWordsServices, session, campaignId);
  }

  /**
   * Creates four sitelinks (static, date-scheduled, device-preferred and
   * day/time-scheduled) and attaches them to the campaign through a
   * campaign extension setting.
   */
  public static void runExample(AdWordsServices adWordsServices, AdWordsSession session,
      Long campaignId) throws ApiException, RemoteException {
    // Get the CustomerService. The customer's time zone is needed so that
    // scheduled sitelink start/end times are interpreted correctly.
    CustomerServiceInterface customerService =
        adWordsServices.get(session, CustomerServiceInterface.class);
    Customer customer = customerService.get();
    DateTimeZone customerTimeZone = DateTimeZone.forID(customer.getDateTimeZone());

    // Get the CampaignExtensionSettingService.
    CampaignExtensionSettingServiceInterface campaignExtensionSettingService =
        adWordsServices.get(session, CampaignExtensionSettingServiceInterface.class);

    // Create the sitelinks.
    SitelinkFeedItem sitelink1 =
        createSiteLinkFeedItem("Store Hours", "http://www.example.com/storehours");

    // Show the Thanksgiving specials link only from 20 - 27 Nov.
    SitelinkFeedItem sitelink2 =
        createSiteLinkFeedItem("Thanksgiving Specials", "http://www.example.com/thanksgiving");

    // The time zone of the start and end date/times must match the time zone of the customer.
    DateTime startTime = new DateTime(DateTime.now().getYear(), 11, 20, 0, 0, 0, customerTimeZone);
    if (startTime.isBeforeNow()) {
      // Move the startTime to next year if the current date is past November 20th.
      startTime = startTime.plusYears(1);
    }
    sitelink2.setStartTime(startTime.toString("yyyyMMdd HHmmss ZZZ"));

    // Use the same year as startTime when creating endTime.
    DateTime endTime = new DateTime(startTime.getYear(), 11, 27, 23, 59, 59, customerTimeZone);
    sitelink2.setEndTime(endTime.toString("yyyyMMdd HHmmss ZZZ"));

    // Show the wifi details primarily for high end mobile users.
    SitelinkFeedItem sitelink3 =
        createSiteLinkFeedItem("Wifi available", "http://www.example.com/mobile/wifi");

    // See https://developers.google.com/adwords/api/docs/appendix/platforms for device criteria
    // IDs. 30001 targets high-end mobile devices.
    FeedItemDevicePreference devicePreference = new FeedItemDevicePreference(30001L);
    sitelink3.setDevicePreference(devicePreference);

    // Show the happy hours link only during Mon - Fri 6PM to 9PM.
    SitelinkFeedItem sitelink4 =
        createSiteLinkFeedItem("Happy hours", "http://www.example.com/happyhours");
    sitelink4.setScheduling(new FeedItemScheduling(new FeedItemSchedule[] {
        new FeedItemSchedule(DayOfWeek.MONDAY, 18, MinuteOfHour.ZERO, 21, MinuteOfHour.ZERO),
        new FeedItemSchedule(DayOfWeek.TUESDAY, 18, MinuteOfHour.ZERO, 21, MinuteOfHour.ZERO),
        new FeedItemSchedule(DayOfWeek.WEDNESDAY, 18, MinuteOfHour.ZERO, 21, MinuteOfHour.ZERO),
        new FeedItemSchedule(DayOfWeek.THURSDAY, 18, MinuteOfHour.ZERO, 21, MinuteOfHour.ZERO),
        new FeedItemSchedule(DayOfWeek.FRIDAY, 18, MinuteOfHour.ZERO, 21, MinuteOfHour.ZERO)}));

    // Create your campaign extension settings. This associates the sitelinks
    // to your campaign.
    CampaignExtensionSetting campaignExtensionSetting = new CampaignExtensionSetting();
    campaignExtensionSetting.setCampaignId(campaignId);
    campaignExtensionSetting.setExtensionType(FeedType.SITELINK);
    ExtensionSetting extensionSetting = new ExtensionSetting();
    extensionSetting.setExtensions(
        new ExtensionFeedItem[] {sitelink1, sitelink2, sitelink3, sitelink4});
    campaignExtensionSetting.setExtensionSetting(extensionSetting);

    CampaignExtensionSettingOperation operation = new CampaignExtensionSettingOperation();
    operation.setOperand(campaignExtensionSetting);
    operation.setOperator(Operator.ADD);

    // Add the extensions.
    CampaignExtensionSettingReturnValue returnValue =
        campaignExtensionSettingService.mutate(new CampaignExtensionSettingOperation[] {operation});
    if (returnValue.getValue() != null && returnValue.getValue().length > 0) {
      CampaignExtensionSetting newExtensionSetting = returnValue.getValue(0);
      System.out.printf("Extension setting with type '%s' was added to campaign ID %d.%n",
          newExtensionSetting.getExtensionType().getValue(), newExtensionSetting.getCampaignId());
    } else {
      System.out.println("No extension settings were created.");
    }
  }

  /**
   * Creates a new {@link SitelinkFeedItem} with the specified attributes.
   *
   * @param sitelinkText the text for the sitelink
   * @param sitelinkUrl the URL for the sitelink
   * @return a new SitelinkFeedItem
   */
  private static SitelinkFeedItem createSiteLinkFeedItem(String sitelinkText, String sitelinkUrl) {
    SitelinkFeedItem sitelinkFeedItem = new SitelinkFeedItem();
    sitelinkFeedItem.setSitelinkText(sitelinkText);
    sitelinkFeedItem.setSitelinkFinalUrls(new UrlList(new String[] {sitelinkUrl}));
    return sitelinkFeedItem;
  }
}
|
gawkermedia/googleads-java-lib
|
examples/adwords_axis/src/main/java/adwords/axis/v201509/extensions/AddSiteLinks.java
|
Java
|
apache-2.0
| 8,041
|
<!doctype html public "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd">
<html>
<head>
<title>PHPXRef 0.7.1 : Unnamed Project : Function Reference: _flashdata_mark()</title>
<link rel="stylesheet" href="../sample.css" type="text/css">
<link rel="stylesheet" href="../sample-print.css" type="text/css" media="print">
<style id="hilight" type="text/css"></style>
<meta http-equiv="content-type" content="text/html;charset=iso-8859-1">
</head>
<body bgcolor="#ffffff" text="#000000" link="#801800" vlink="#300540" alink="#ffffff">
<table class="pagetitle" width="100%">
<tr>
<td valign="top" class="pagetitle">
[ <a href="../index.html">Index</a> ]
</td>
<td align="right" class="pagetitle">
<h2 style="margin-bottom: 0px">PHP Cross Reference of Unnamed Project</h2>
</td>
</tr>
</table>
<!-- Generated by PHPXref 0.7.1 at Thu Oct 23 18:57:41 2014 -->
<!-- PHPXref (c) 2000-2010 Gareth Watts - gareth@omnipotent.net -->
<!-- http://phpxref.sourceforge.net/ -->
<script src="../phpxref.js" type="text/javascript"></script>
<script language="JavaScript" type="text/javascript">
<!--
ext='.html';
relbase='../';
subdir='_functions';
filename='index.html';
cookiekey='phpxref';
handleNavFrame(relbase, subdir, filename);
logFunction('_flashdata_mark');
// -->
</script>
<script language="JavaScript" type="text/javascript">
if (gwGetCookie('xrefnav')=='off')
document.write('<p class="navlinks">[ <a href="javascript:navOn()">Show Explorer<\/a> ]<\/p>');
else
document.write('<p class="navlinks">[ <a href="javascript:navOff()">Hide Explorer<\/a> ]<\/p>');
</script>
<noscript>
<p class="navlinks">
[ <a href="../nav.html" target="_top">Show Explorer</a> ]
[ <a href="index.html" target="_top">Hide Navbar</a> ]
</p>
</noscript>
[<a href="../index.html">Top level directory</a>]<br>
<script language="JavaScript" type="text/javascript">
<!--
document.writeln('<table align="right" class="searchbox-link"><tr><td><a class="searchbox-link" href="javascript:void(0)" onMouseOver="showSearchBox()">Search</a><br>');
document.writeln('<table border="0" cellspacing="0" cellpadding="0" class="searchbox" id="searchbox">');
document.writeln('<tr><td class="searchbox-title">');
document.writeln('<a class="searchbox-title" href="javascript:showSearchPopup()">Search History +</a>');
document.writeln('<\/td><\/tr>');
document.writeln('<tr><td class="searchbox-body" id="searchbox-body">');
document.writeln('<form name="search" style="margin:0px; padding:0px" onSubmit=\'return jump()\'>');
document.writeln('<a class="searchbox-body" href="../_classes/index.html">Class<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="classname"><br>');
document.writeln('<a id="funcsearchlink" class="searchbox-body" href="../_functions/index.html">Function<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="funcname"><br>');
document.writeln('<a class="searchbox-body" href="../_variables/index.html">Variable<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="varname"><br>');
document.writeln('<a class="searchbox-body" href="../_constants/index.html">Constant<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="constname"><br>');
document.writeln('<a class="searchbox-body" href="../_tables/index.html">Table<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="tablename"><br>');
document.writeln('<input type="submit" class="searchbox-button" value="Search">');
document.writeln('<\/form>');
document.writeln('<\/td><\/tr><\/table>');
document.writeln('<\/td><\/tr><\/table>');
// -->
</script>
<div id="search-popup" class="searchpopup"><p id="searchpopup-title" class="searchpopup-title">title</p><div id="searchpopup-body" class="searchpopup-body">Body</div><p class="searchpopup-close"><a href="javascript:gwCloseActive()">[close]</a></p></div>
<h3>Function and Method Cross Reference</h3>
<h2><a href="index.html#_flashdata_mark">_flashdata_mark()</a></h2>
<b>Defined at:</b><ul>
<li><a href="../bonfire/codeigniter/libraries/Session.php.html#_flashdata_mark">/bonfire/codeigniter/libraries/Session.php</a> -> <a onClick="logFunction('_flashdata_mark', '/bonfire/codeigniter/libraries/Session.php.source.html#l570')" href="../bonfire/codeigniter/libraries/Session.php.source.html#l570"> line 570</a></li>
</ul>
<b>Referenced 1 times:</b><ul>
<li><a href="../bonfire/codeigniter/libraries/Session.php.html">/bonfire/codeigniter/libraries/Session.php</a> -> <a href="../bonfire/codeigniter/libraries/Session.php.source.html#l119"> line 119</a></li>
</ul>
<!-- A link to the phpxref site in your customized footer file is appreciated ;-) -->
<br><hr>
<table width="100%">
<tr><td>Generated: Thu Oct 23 18:57:41 2014</td>
<td align="right"><i>Cross-referenced by <a href="http://phpxref.sourceforge.net/">PHPXref 0.7.1</a></i></td>
</tr>
</table>
</body></html>
|
inputx/code-ref-doc
|
bonfire/_functions/_flashdata_mark.html
|
HTML
|
apache-2.0
| 4,958
|
package de.akquinet.innovation.play.maven;
import de.akquinet.innovation.play.maven.utils.CopyDependenciesEmbeddedMojo;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiver.MavenArchiveConfiguration;
import org.apache.maven.archiver.MavenArchiver;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactCollector;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.shared.dependency.tree.DependencyNode;
import org.apache.maven.shared.dependency.tree.DependencyTreeBuilder;
import org.apache.maven.shared.dependency.tree.DependencyTreeBuilderException;
import org.codehaus.plexus.archiver.war.WarArchiver;
import org.codehaus.plexus.util.StringUtils;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
/**
 * Packages the Play application as War.
 * <p>
 * Builds an exploded web application under {@link #webappDirectory}
 * (WEB-INF/META-INF structure, embedded project dependencies, the Play
 * runtime, the compiled Play classes and the Play2 servlet bridge), then
 * archives the result into {@code ${project.build.finalName}.war} and
 * attaches it to the Maven project.
 *
 * @goal package-war
 * @phase package
 * @requiresDependencyResolution test
 */
public class WarPackageMojo extends AbstractPlay2Mojo {

    private static final String META_INF = "META-INF";

    private static final String WEB_INF = "WEB-INF";

    public static final String TLD_PATH = "WEB-INF/tld/";

    public static final String SERVICES_PATH = "WEB-INF/services/";

    public static final String MODULES_PATH = "WEB-INF/modules/";

    public static final String EXTENSIONS_PATH = "WEB-INF/extensions/";

    public static final String CLASSES_PATH = "WEB-INF/classes/";

    public static final String LIB_PATH = "WEB-INF/lib/";

    /** Dependency scopes whose artifacts are embedded into the War (see {@link #mustBeEmbedded}). */
    private static final List<String> EMBEDDED_SCOPES = Arrays.asList("compile");

    /**
     * The directory where the webapp is built.
     *
     * @parameter default-value="${project.build.directory}/${project.build.finalName}"
     */
    File webappDirectory;

    /**
     * Dependencies of the current plugin.
     * This list is used to extract and copy the servlet bridge.
     *
     * @parameter expression="${plugin.artifacts}"
     */
    List pluginArtifacts;

    /**
     * Used to look up Artifacts in the remote repository.
     *
     * @component
     */
    protected ArtifactFactory factory;

    /**
     * Used to resolve Artifacts in the remote repository.
     *
     * @component
     */
    protected ArtifactResolver resolver;

    /**
     * Artifact collector, needed to resolve dependencies.
     *
     * @component role="org.apache.maven.artifact.resolver.ArtifactCollector"
     * @required
     * @readonly
     */
    protected ArtifactCollector artifactCollector;

    /**
     * The dependency tree builder to use.
     *
     * @component
     * @required
     * @readonly
     */
    protected DependencyTreeBuilder dependencyTreeBuilder;

    /**
     * @component role="org.apache.maven.artifact.metadata.ArtifactMetadataSource" hint="maven"
     * @required
     * @readonly
     */
    protected ArtifactMetadataSource artifactMetadataSource;

    /**
     * Location of the local repository.
     *
     * @parameter expression="${localRepository}"
     * @readonly
     * @required
     */
    protected ArtifactRepository local;

    /**
     * List of Remote Repositories used by the resolver
     *
     * @parameter expression="${project.remoteArtifactRepositories}"
     * @readonly
     * @required
     */
    protected List<ArtifactRepository> remoteRepos;

    /**
     * The WAR archiver.
     *
     * @component role="org.codehaus.plexus.archiver.Archiver" roleHint="war"
     */
    protected WarArchiver warArchiver;

    /**
     * Enables or disabled the packaging of the application as a War file.
     *
     * @parameter default-value=true
     */
    boolean buildWar;

    /**
     * Sets the war classifier.
     *
     * @parameter default-value=""
     */
    String warClassifier;

    /**
     * Allows customization of the play packaging. The files specified in this attribute will get added to the distribution
     * zip file. This allows, for example, to write your own start script and have it packaged in the distribution.
     * This is done post-packaging by the play framework.
     *
     * This parameter is shared with the package mojo.
     *
     * @parameter
     */
    List<String> additionalFiles = new ArrayList<String>();

    /** Root of the resolved dependency tree, populated by {@link #prepareDependencyTree()}. */
    DependencyNode treeRoot;

    /**
     * Mojo entry point: builds the exploded webapp and packages it as a War.
     *
     * @throws MojoExecutionException if the dependency tree cannot be built or the War cannot be created
     * @throws MojoFailureException never thrown directly, kept for the Mojo contract
     */
    public void execute() throws MojoExecutionException, MojoFailureException {
        if (!buildWar) {
            getLog().info("Skipped War construction");
            return;
        }

        getLog().info("Build war file");
        prepareDependencyTree();

        try {
            // Start from a clean output directory to avoid stale files in the War.
            if (webappDirectory.exists()) {
                getLog().info(webappDirectory.getAbsolutePath() + " directory existing - deleting");
                FileUtils.deleteDirectory(webappDirectory);
            }

            // Prepare structure.
            prepareWarStructure();

            // Copy dependencies to the right folders.
            copyDependencies();
            copyPlayDependencies();

            // Copy class files, it must be the final class files built by Play.
            copyClassFiles();

            // Copy the servlet bridge.
            copyServletBridge();

            // Build the war file
            File warFile = new File(buildDirectory, project.getBuild().getFinalName() + ".war");
            packageWarFile(webappDirectory, warFile);
        } catch (IOException e) {
            getLog().error("Cannot build the War file : ", e);
            throw new MojoExecutionException("Error during War file construction", e);
        }
    }

    /**
     * Resolves the project's dependency tree into {@link #treeRoot}.
     *
     * @throws MojoExecutionException if the tree cannot be built
     */
    private void prepareDependencyTree() throws MojoExecutionException {
        try {
            getLog().debug("Building dependency tree");
            treeRoot =
                    dependencyTreeBuilder.buildDependencyTree(project, local, factory,
                            artifactMetadataSource, null, artifactCollector);
        } catch (DependencyTreeBuilderException e) {
            getLog().error("Cannot build the dependency tree", e);
            throw new MojoExecutionException("Cannot build the dependency tree", e);
        }
    }

    public File getWebappDirectory() {
        return webappDirectory;
    }

    public ArtifactFactory getFactory() {
        return factory;
    }

    public ArtifactResolver getResolver() {
        return resolver;
    }

    public ArtifactCollector getArtifactCollector() {
        return artifactCollector;
    }

    public ArtifactMetadataSource getArtifactMetadataSource() {
        return artifactMetadataSource;
    }

    public ArtifactRepository getLocal() {
        return local;
    }

    public List<ArtifactRepository> getRemoteRepos() {
        return remoteRepos;
    }

    public String getWarClassifier() {
        return warClassifier;
    }

    /**
     * Copies the Play runtime and its transitive dependencies into the webapp,
     * excluding the "play-test" scope handled by the embedded copy mojo.
     *
     * @throws IOException if the Play dependencies cannot be resolved or copied
     */
    private void copyPlayDependencies() throws IOException {
        getLog().info("Copying Play runtime and its dependencies");
        CopyDependenciesEmbeddedMojo copy = new CopyDependenciesEmbeddedMojo(this, "play-test");
        try {
            copy.execute();
        } catch (MojoExecutionException e) {
            getLog().error("Cannot copy play runtime", e);
            throw new IOException("Error during the resolution of Play 2 dependencies", e);
        }
    }

    /**
     * Copies the two servlet-bridge artifacts shipped with this plugin into WEB-INF/lib.
     *
     * @throws IOException if either bridge artifact cannot be located or copied
     */
    private void copyServletBridge() throws IOException {
        // We need to copy two artifacts : play2-war-core-common_2.9.1 and play2-war-core-servlet30_2.9.1
        @SuppressWarnings("unchecked") // pluginArtifacts is injected by Maven as a raw List of Artifact
        List<Artifact> artifacts = pluginArtifacts;
        URL common = getUrlByArtifactId(artifacts, "play2-war-core-common_2.9.1");
        URL servlet = getUrlByArtifactId(artifacts, "play2-war-core-servlet30_2.9.1");
        // Fail fast with a clear message instead of letting copyURLToFile throw an opaque NPE.
        if (common == null || servlet == null) {
            throw new IOException("Cannot find the Play 2 servlet bridge artifacts "
                    + "(play2-war-core-common_2.9.1 / play2-war-core-servlet30_2.9.1) among the plugin dependencies");
        }
        FileUtils.copyURLToFile(common, new File(webappDirectory, LIB_PATH + "play2-war-core-common_2.9.1.jar"));
        FileUtils.copyURLToFile(servlet, new File(webappDirectory, LIB_PATH + "play2-war-core-servlet30_2.9.1.jar"));
    }

    /**
     * Gets the artifact's URL from the artifact list.
     *
     * @param artifacts the list of artifact
     * @param artifactId the dependency artifact id.
     * @return the artifact's URL or <code>null</code> if the URL cannot
     *         be found.
     */
    private URL getUrlByArtifactId(List<Artifact> artifacts, String artifactId) {
        for (Artifact artifact : artifacts) {
            if (artifact.getArtifactId().equals(artifactId)) {
                try {
                    return artifact.getFile().toURI().toURL();
                } catch (MalformedURLException e) {
                    getLog().error("Cannot compute the url of the artifact : " + artifactId);
                }
            }
        }
        return null;
    }

    /**
     * Creates the WEB-INF and META-INF skeleton of the exploded webapp.
     *
     * @throws IOException if one of the directories cannot be created
     */
    private void prepareWarStructure() throws IOException {
        File webinfDir = new File(webappDirectory, WEB_INF);
        if (!webinfDir.isDirectory() && !webinfDir.mkdirs()) {
            throw new IOException("Cannot create directory " + webinfDir.getAbsolutePath());
        }
        File metainfDir = new File(webappDirectory, META_INF);
        if (!metainfDir.isDirectory() && !metainfDir.mkdirs()) {
            throw new IOException("Cannot create directory " + metainfDir.getAbsolutePath());
        }
    }

    /**
     * Copies the classes, managed classes and managed resources produced by
     * Play (under target/scala-*) into WEB-INF/classes.
     *
     * @throws IOException if the Play output directory is missing or a copy fails
     */
    private void copyClassFiles() throws IOException {
        File scala = findScalaDirectory();

        File classes = new File(scala, "classes");
        if (classes.exists()) {
            getLog().info("Copying classes from " + classes + " to " + CLASSES_PATH);
            FileUtils.copyDirectory(classes, new File(webappDirectory, CLASSES_PATH));
        }

        File managedClasses = new File(scala, "classes_managed");
        if (managedClasses.exists()) {
            getLog().info("Copying classes from " + managedClasses + " to " + CLASSES_PATH);
            FileUtils.copyDirectory(managedClasses, new File(webappDirectory, CLASSES_PATH));
        }

        File resourceClasses = new File(scala, "resource_managed");
        if (resourceClasses.exists()) {
            getLog().info("Copying resources from " + resourceClasses + " to " + CLASSES_PATH);
            FileUtils.copyDirectory(resourceClasses, new File(webappDirectory, CLASSES_PATH));
        }
    }

    /**
     * Locates the single "scala-*" output directory produced by Play inside the build directory.
     *
     * @return the scala output directory
     * @throws IOException if zero or more than one candidate is found
     */
    private File findScalaDirectory() throws IOException {
        File[] array = buildDirectory.listFiles(new FilenameFilter() {
            public boolean accept(File file, String s) {
                return s.startsWith("scala-");
            }
        });
        // listFiles returns null when buildDirectory does not exist or is not a directory.
        if (array == null || array.length == 0) {
            throw new IOException("Cannot find Play output files");
        }
        if (array.length > 1) {
            throw new IOException("Cannot find Play output files - too many candidates");
        }
        return array[0];
    }

    /**
     * Archives the exploded webapp into the War file and attaches it to the project.
     *
     * @param war the exploded webapp directory to archive
     * @param warFile the War file to create
     * @throws IOException if the archive cannot be created
     */
    private void packageWarFile(File war, File warFile) throws IOException {
        getLog().info("Build war file " + warFile.getAbsolutePath() + " from " + war.getAbsolutePath());

        // We build a Jar from the webappDirectory.
        MavenArchiver archiver = new MavenArchiver();
        archiver.setArchiver(warArchiver);
        archiver.setOutputFile(warFile);

        try {
            warArchiver.addDirectory(war);

            // Manage additional files if any
            if (! additionalFiles.isEmpty()) {
                getLog().info("Adding additional files to War file : " + additionalFiles);
                for (String file : additionalFiles) {
                    File fileToAdd = new File(project.getBasedir(), file);
                    if (!fileToAdd.exists()) {
                        throw new IOException(fileToAdd.getCanonicalPath() + " not found, can't add to war file");
                    }
                    warArchiver.addFile(fileToAdd, fileToAdd.getName());
                }
            }

            // Play applications do not ship a web.xml; the servlet-3.0 bridge provides the wiring.
            warArchiver.setIgnoreWebxml(false);

            MavenArchiveConfiguration archive = new MavenArchiveConfiguration();
            archiver.createArchive(session, project, archive);
        } catch (Exception e) {
            getLog().error("Error during the construction of the War file with the archiving process", e);
            throw new IOException("Cannot build the War file", e);
        }

        // Attach as classified artifact, or set/attach as the main/secondary war.
        if (!StringUtils.isBlank(warClassifier)) {
            projectHelper.attachArtifact(project, "war", warClassifier, warFile);
        } else {
            Artifact artifact = project.getArtifact();
            if (project.getFile() == null || !project.getFile().exists()) {
                artifact.setFile(warFile);
            } else {
                projectHelper.attachArtifact(project, "war", warFile);
            }
        }
    }

    /**
     * Decides whether the given artifact must be embedded in WEB-INF/lib:
     * non-optional, "compile" scope, and not a servlet-api artifact (provided
     * by the container).
     */
    private boolean mustBeEmbedded(Artifact artifact) {
        return !artifact.isOptional() &&
                EMBEDDED_SCOPES.contains(artifact.getScope()) &&
                !artifact.getArtifactId().contains("servlet-api");
    }

    /**
     * Copies the project's direct dependencies into the webapp, dispatching
     * on artifact type (tld, aar, mar, xar, jar-like, par, war, zip).
     *
     * @throws IOException if a copy fails
     */
    public void copyDependencies()
            throws IOException {
        Set<Artifact> artifacts = project.getDependencyArtifacts();
        if (artifacts == null) {
            // No direct dependencies declared; nothing to copy.
            return;
        }
        for (Artifact artifact : artifacts) {
            // The file name is just the artifact's file name.
            String targetFileName = artifact.getFile().getName();
            getLog().info("Processing: " + targetFileName);

            if (mustBeEmbedded(artifact)) {
                String type = artifact.getType();
                if ("tld".equals(type)) {
                    FileUtils.copyFile(artifact.getFile(), new File(webappDirectory, TLD_PATH + targetFileName));
                } else if ("aar".equals(type)) {
                    FileUtils.copyFile(artifact.getFile(), new File(webappDirectory, SERVICES_PATH + targetFileName));
                } else if ("mar".equals(type)) {
                    FileUtils.copyFile(artifact.getFile(), new File(webappDirectory, MODULES_PATH + targetFileName));
                } else if ("xar".equals(type)) {
                    FileUtils.copyFile(artifact.getFile(), new File(webappDirectory,
                            EXTENSIONS_PATH + targetFileName));
                } else if ("jar".equals(type) || "ejb".equals(type) || "ejb-client".equals(type)
                        || "test-jar".equals(type)) {
                    getLog().info("Copying " + targetFileName + " to " + LIB_PATH);
                    FileUtils.copyFile(artifact.getFile(), new File(webappDirectory,
                            LIB_PATH + targetFileName));
                } else if ("par".equals(type)) {
                    // A par is repackaged under a .jar extension.
                    targetFileName = targetFileName.substring(0, targetFileName.lastIndexOf('.')) + ".jar";
                    FileUtils.copyFile(artifact.getFile(), new File(webappDirectory,
                            LIB_PATH + targetFileName));
                } else if ("war".equals(type)) {
                    getLog().warn("Not supported dependency type : war");
                } else if ("zip".equals(type)) {
                    getLog().warn("Not supported dependency type : zip");
                } else {
                    getLog().debug(
                            "Artifact of type [" + type + "] is not supported, ignoring [" + artifact + "]");
                }
            }
        }
    }

    public DependencyNode getDependencyTreeRoot() {
        return treeRoot;
    }
}
|
cescoffier/maven-play2-plugin
|
src/main/java/de.akquinet.innovation.play.maven/WarPackageMojo.java
|
Java
|
apache-2.0
| 15,311
|
/*
* #%L
* Diana UI Core
* %%
* Copyright (C) 2014 Diana UI
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.dianaui.universal.core.client.ui.gwt;
import com.dianaui.universal.core.client.ui.base.HasResponsiveness;
import com.dianaui.universal.core.client.ui.base.helper.StyleHelper;
import com.dianaui.universal.core.client.ui.constants.DeviceSize;
import com.dianaui.universal.core.client.ui.constants.TableType;
import com.google.gwt.core.client.GWT;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.view.client.ProvidesKey;
/**
 * Bootstrap-friendly replacement for GWT's
 * {@link com.google.gwt.user.cellview.client.CellTable}: it swaps the stock
 * GWT cell-table CSS for Bootstrap table classes and exposes the usual
 * Bootstrap table variants (striped, bordered, condensed, hover).
 *
 * @author Joshua Godi
 */
public class CellTable<T> extends com.google.gwt.user.cellview.client.CellTable<T> implements HasResponsiveness {
    private static final int DEFAULT_PAGESIZE = 15;

    /** Lazily created, shared resource bundle stripping the stock GWT styling. */
    private static Resources defaultResources;

    /**
     * Constructs a table with a default page size of 15.
     */
    public CellTable() {
        this(DEFAULT_PAGESIZE);
    }

    /**
     * Constructs a table with the given page size.
     *
     * @param pageSize the page size
     */
    public CellTable(final int pageSize) {
        this(pageSize, getDefaultResources());
    }

    /**
     * Constructs a table with a default page size of 15, and the given
     * {@link ProvidesKey key provider}.
     *
     * @param keyProvider an instance of ProvidesKey, or null if the record
     *                    object should act as its own key
     */
    public CellTable(final ProvidesKey<T> keyProvider) {
        this(DEFAULT_PAGESIZE, keyProvider);
    }

    /**
     * Constructs a table with the given page size with the specified
     * {@link Resources}.
     *
     * @param pageSize  the page size
     * @param resources the resources to use for this widget
     */
    public CellTable(final int pageSize, final Resources resources) {
        super(pageSize, resources, null);
    }

    /**
     * Constructs a table with the given page size and the given
     * {@link ProvidesKey key provider}.
     *
     * @param pageSize    the page size
     * @param keyProvider an instance of ProvidesKey, or null if the record
     *                    object should act as its own key
     */
    public CellTable(final int pageSize, final ProvidesKey<T> keyProvider) {
        super(pageSize, getDefaultResources(), keyProvider);
    }

    /**
     * Constructs a table with the specified page size, {@link Resources}, key
     * provider, and loading indicator.
     *
     * @param pageSize         the page size
     * @param resources        the resources to use for this widget
     * @param keyProvider      an instance of ProvidesKey, or null if the record
     *                         object should act as its own key
     * @param loadingIndicator the widget to use as a loading indicator, or null
     *                         to disable
     */
    public CellTable(final int pageSize, final Resources resources, final ProvidesKey<T> keyProvider, final Widget loadingIndicator) {
        this(pageSize, resources, keyProvider, loadingIndicator, true, true);
    }

    /**
     * Constructs a table with the specified page size, {@link Resources}, key
     * provider, and loading indicator.
     *
     * @param pageSize           the page size
     * @param resources          the resources to use for this widget
     * @param keyProvider        an instance of ProvidesKey, or null if the record
     *                           object should act as its own key
     * @param loadingIndicator   the widget to use as a loading indicator, or null
     *                           to disable
     * @param enableColGroup     enable colgroup element. This is used when the table is using fixed
     *                           layout and when column style is added. Ignoring this element will boost rendering
     *                           performance. Note that when colgroup is disabled, {@link #setColumnWidth}
     * @param attachLoadingPanel attaching the table section that contains the empty table widget and
     *                           the loading indicator. Attaching this to the table significantly improve the rendering
     *                           performance in webkit based browsers but also introduces significantly larger latency
     *                           in IE. If the panel is not attached to the table, it won't be displayed. But the user
     *                           can call {@link #getTableLoadingSection} and attach it to other elements outside the
     *                           table element
     */
    public CellTable(final int pageSize, final Resources resources, final ProvidesKey<T> keyProvider,
                     final Widget loadingIndicator, final boolean enableColGroup, final boolean attachLoadingPanel) {
        super(pageSize, resources, keyProvider, loadingIndicator, enableColGroup, attachLoadingPanel);
        StyleHelper.addEnumStyleName(this, TableType.DEFAULT);
    }

    /** Returns the shared Bootstrap-friendly resources, creating them on first use. */
    private static Resources getDefaultResources() {
        if (defaultResources == null) {
            final CellTable.Resources stockResources = GWT.create(CellTable.Resources.class);
            defaultResources = new ResourcesAdapter(stockResources);
        }
        return defaultResources;
    }

    @Override
    public void setVisibleOn(final DeviceSize deviceSize) {
        StyleHelper.setVisibleOn(this, deviceSize);
    }

    @Override
    public void setHiddenOn(final DeviceSize deviceSize) {
        StyleHelper.setHiddenOn(this, deviceSize);
    }

    /** Adds or removes a single Bootstrap table variant style. */
    private void applyTableStyle(final TableType type, final boolean enable) {
        if (enable) {
            StyleHelper.addEnumStyleName(this, type);
        } else {
            StyleHelper.removeEnumStyleName(this, type);
        }
    }

    public void setStriped(final boolean striped) {
        applyTableStyle(TableType.STRIPED, striped);
    }

    public void setBordered(final boolean bordered) {
        applyTableStyle(TableType.BORDERED, bordered);
    }

    public void setCondensed(final boolean condensed) {
        applyTableStyle(TableType.CONDENSED, condensed);
    }

    public void setHover(final boolean hover) {
        applyTableStyle(TableType.HOVER, hover);
    }

    /**
     * Resources/Styles to remove the GWT styling of the tables!
     * Delegates every image resource to the stock bundle, but swaps the
     * CSS for the Bootstrap-oriented {@link StyleAdapter}.
     */
    private static class ResourcesAdapter implements CellTable.Resources {
        private final CellTable.Resources delegate;
        private final StyleAdapter style;

        public ResourcesAdapter(final CellTable.Resources resources) {
            this.delegate = resources;
            this.style = new StyleAdapter();
        }

        @Override
        public ImageResource cellTableFooterBackground() {
            return delegate.cellTableFooterBackground();
        }

        @Override
        public ImageResource cellTableHeaderBackground() {
            return delegate.cellTableHeaderBackground();
        }

        @Override
        public ImageResource cellTableLoading() {
            return delegate.cellTableLoading();
        }

        @Override
        public ImageResource cellTableSelectedBackground() {
            return delegate.cellTableSelectedBackground();
        }

        @Override
        public ImageResource cellTableSortAscending() {
            return delegate.cellTableSortAscending();
        }

        @Override
        public ImageResource cellTableSortDescending() {
            return delegate.cellTableSortDescending();
        }

        @Override
        public Style cellTableStyle() {
            return style;
        }
    }

    /**
     * CSS adapter mapping GWT's cell-table class hooks onto Bootstrap class
     * names. Hooks Bootstrap has no equivalent for return a shared inert
     * class name so that no GWT styling is applied.
     */
    private static class StyleAdapter implements CellTable.Style {
        private static final String PREFIX = "gwtb3-";
        // Inert placeholder class: present in the DOM but never styled.
        private static final String UNSTYLED = PREFIX + "d";

        @Override
        public boolean ensureInjected() {
            return true;
        }

        @Override
        public String cellTableCell() {
            return PREFIX + "cell";
        }

        @Override
        public String cellTableEvenRow() {
            return "even";
        }

        @Override
        public String cellTableEvenRowCell() {
            return UNSTYLED;
        }

        @Override
        public String cellTableFirstColumn() {
            return UNSTYLED; // Bootstrap3 uses "smart selectors"
        }

        @Override
        public String cellTableFirstColumnFooter() {
            return UNSTYLED;
        }

        @Override
        public String cellTableFirstColumnHeader() {
            return UNSTYLED;
        }

        @Override
        public String cellTableFooter() {
            return UNSTYLED;
        }

        @Override
        public String cellTableHeader() {
            return UNSTYLED;
        }

        @Override
        public String cellTableHoveredRow() {
            return "active";
        }

        @Override
        public String cellTableHoveredRowCell() {
            return "active";
        }

        @Override
        public String cellTableKeyboardSelectedCell() {
            return UNSTYLED;
        }

        @Override
        public String cellTableKeyboardSelectedRow() {
            return UNSTYLED;
        }

        @Override
        public String cellTableKeyboardSelectedRowCell() {
            return UNSTYLED;
        }

        @Override
        public String cellTableLastColumn() {
            return UNSTYLED;
        }

        @Override
        public String cellTableLastColumnFooter() {
            return UNSTYLED;
        }

        @Override
        public String cellTableLastColumnHeader() {
            return UNSTYLED;
        }

        @Override
        public String cellTableLoading() {
            return UNSTYLED;
        }

        @Override
        public String cellTableOddRow() {
            return "odd";
        }

        @Override
        public String cellTableOddRowCell() {
            return UNSTYLED;
        }

        @Override
        public String cellTableSelectedRow() {
            return "info";
        }

        @Override
        public String cellTableSelectedRowCell() {
            return UNSTYLED;
        }

        @Override
        public String cellTableSortableHeader() {
            return UNSTYLED;
        }

        @Override
        public String cellTableSortedHeaderAscending() {
            return UNSTYLED;
        }

        @Override
        public String cellTableSortedHeaderDescending() {
            return UNSTYLED;
        }

        @Override
        public String cellTableWidget() {
            return "table";
        }

        @Override
        public String getText() {
            return PREFIX;
        }

        @Override
        public String getName() {
            return PREFIX;
        }
    }
}
|
dianaui/dianaui-universal
|
core/src/main/java/com/dianaui/universal/core/client/ui/gwt/CellTable.java
|
Java
|
apache-2.0
| 11,556
|
package com.sequenceiq.freeipa.flow.stack.termination.handler;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.sequenceiq.cloudbreak.ccm.termination.CcmResourceTerminationListener;
import com.sequenceiq.cloudbreak.ccm.termination.CcmV2AgentTerminationListener;
import com.sequenceiq.cloudbreak.common.event.Selectable;
import com.sequenceiq.flow.event.EventSelectorUtil;
import com.sequenceiq.flow.reactor.api.handler.EventHandler;
import com.sequenceiq.freeipa.flow.stack.termination.event.ccm.CcmKeyDeregistrationFinished;
import com.sequenceiq.freeipa.flow.stack.termination.event.ccm.CcmKeyDeregistrationRequest;
import reactor.bus.Event;
import reactor.bus.EventBus;
/**
 * Handles {@code CcmKeyDeregistrationRequest} flow events for FreeIPA stack
 * termination: de-registers the stack's CCM v1 tunneling key or CCM v2 agent
 * (depending on the tunnel type) and then emits a
 * {@code CcmKeyDeregistrationFinished} event. De-registration failures are
 * logged and swallowed so termination can proceed regardless.
 */
@Component
public class CcmKeyDeregistrationHandler implements EventHandler<CcmKeyDeregistrationRequest> {

    private static final Logger LOGGER = LoggerFactory.getLogger(CcmKeyDeregistrationHandler.class);

    @Inject
    private CcmResourceTerminationListener ccmResourceTerminationListener;

    @Inject
    private CcmV2AgentTerminationListener ccmV2AgentTerminationListener;

    @Inject
    private EventBus eventBus;

    @Override
    public void accept(Event<CcmKeyDeregistrationRequest> requestEvent) {
        CcmKeyDeregistrationRequest request = requestEvent.getData();
        try {
            deregister(request);
        } catch (Exception ex) {
            // Best effort: a failed de-registration must not block stack termination.
            LOGGER.warn("CCM key de-registration failed", ex);
        }
        Selectable result = new CcmKeyDeregistrationFinished(request.getResourceId(), request.getForced());
        eventBus.notify(result.selector(), new Event<>(requestEvent.getHeaders(), result));
    }

    /** Performs the tunnel-type-specific de-registration (CCM v1, CCM v2/Jumpgate, or none). */
    private void deregister(CcmKeyDeregistrationRequest request) {
        if (request.getTunnel().useCcmV1()) {
            LOGGER.debug("De-registering CCM key '{}' for freeipa stack '{}'", request.getMinaSshdServiceId(), request.getResourceId());
            ccmResourceTerminationListener.deregisterCcmSshTunnelingKey(request.getActorCrn(), request.getAccountId(), request.getKeyId(),
                    request.getMinaSshdServiceId());
            LOGGER.debug("De-registered CCM key '{}' for freeipa stack '{}'", request.getMinaSshdServiceId(), request.getResourceId());
        } else if (request.getTunnel().useCcmV2OrJumpgate()) {
            LOGGER.debug("De-registering CCM V2 key '{}' for freeipa stack {}", request.getCcmV2AgentCrn(), request.getResourceId());
            ccmV2AgentTerminationListener.deregisterInvertingProxyAgent(request.getCcmV2AgentCrn());
            LOGGER.debug("De-registered CCM V2 key '{}' for freeipa stack {}", request.getCcmV2AgentCrn(), request.getResourceId());
        } else {
            LOGGER.debug("CCM is DISABLED, skipping de-registering of key from CCM. FreeIPA stack: {}", request.getResourceId());
        }
    }

    @Override
    public String selector() {
        return EventSelectorUtil.selector(CcmKeyDeregistrationRequest.class);
    }
}
|
hortonworks/cloudbreak
|
freeipa/src/main/java/com/sequenceiq/freeipa/flow/stack/termination/handler/CcmKeyDeregistrationHandler.java
|
Java
|
apache-2.0
| 2,966
|
package com.sequenceiq.redbeams.api.endpoint.v4.database.base;
import java.io.Serializable;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.sequenceiq.cloudbreak.auth.crn.CrnResourceDescriptor;
import com.sequenceiq.cloudbreak.validation.ValidCrn;
import com.sequenceiq.redbeams.doc.ModelDescriptions;
import com.sequenceiq.redbeams.doc.ModelDescriptions.Database;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
 * Request/response payload identifying a database by its name and the CRN of
 * the environment it belongs to. Field constraints are enforced declaratively
 * via Bean Validation annotations.
 */
@ApiModel(description = ModelDescriptions.DATABASE_IDENTIFIERS)
@JsonIgnoreProperties(ignoreUnknown = true)
public class DatabaseV4Identifiers implements Serializable {

    // Database name: 5-100 chars, lowercase letters/digits/hyphens, must start
    // with a letter and end with a letter or digit (see @Pattern below).
    @NotNull
    @Size(max = 100, min = 5, message = "The length of the database's name must be between 5 to 100, inclusive")
    @Pattern(regexp = "(^[a-z][-a-z0-9]*[a-z0-9]$)",
            message = "The database's name may only contain lowercase characters, digits, and hyphens, and must start with an alphanumeric character")
    @ApiModelProperty(value = Database.NAME, required = true)
    private String name;

    // CRN of the owning environment; must be a valid ENVIRONMENT-type CRN.
    @NotNull
    @ValidCrn(resource = CrnResourceDescriptor.ENVIRONMENT)
    @ApiModelProperty(value = Database.ENVIRONMENT_CRN, required = true)
    private String environmentCrn;

    /** Returns the database name. */
    public String getName() {
        return name;
    }

    /** Sets the database name (validated on use, not on assignment). */
    public void setName(String name) {
        this.name = name;
    }

    /** Returns the CRN of the environment the database belongs to. */
    public String getEnvironmentCrn() {
        return environmentCrn;
    }

    /** Sets the environment CRN (validated on use, not on assignment). */
    public void setEnvironmentCrn(String environmentCrn) {
        this.environmentCrn = environmentCrn;
    }
}
|
hortonworks/cloudbreak
|
redbeams-api/src/main/java/com/sequenceiq/redbeams/api/endpoint/v4/database/base/DatabaseV4Identifiers.java
|
Java
|
apache-2.0
| 1,707
|
#include "HvacZone.h"
#include <QMutex>
// Private implementation (pimpl) for HvacZone. Stores the zone's name, mode
// and fan mode, all guarded by m_mutex. Each setter deliberately releases the
// lock BEFORE emitting the change signal through the 'self' back-pointer, so
// a directly-connected slot that calls back into HvacZone cannot deadlock on
// the (non-recursive) mutex. The signal argument is the local copy 'arg', so
// the emitted value is the one that was just written.
class HvacZonePrivate {
public:
    HvacZone *self;    // back-pointer to the public object, used only to emit its signals
    QMutex m_mutex;    // guards m_name, m_mode, m_fanMode
    QString m_name;
    QString m_mode;
    QString m_fanMode;

    // NOTE(review): the getters read without taking m_mutex; QString copies are
    // not atomic, so callers are expected to hold the lock via
    // HvacZone::lock()/unlock() around reads — TODO confirm against callers.
    QString name() const
    {
        return m_name;
    }

    // Updates the name; emits nameChanged(arg) only when the value changed.
    void setName(QString arg)
    {
        lock();
        if (m_name != arg) {
            m_name = arg;
            unlock();  // release before emit: avoids deadlock with direct-connected slots
            emit self->nameChanged(arg);
        } else {
            unlock();
        }
    }

    QString mode() const
    {
        return m_mode;
    }

    // Updates the mode; emits modeChanged(arg) only when the value changed.
    void setMode(QString arg)
    {
        lock();
        if (m_mode != arg) {
            m_mode = arg;
            unlock();  // release before emit: avoids deadlock with direct-connected slots
            emit self->modeChanged(arg);
        } else {
            unlock();
        }
    }

    QString fanMode() const
    {
        return m_fanMode;
    }

    // Updates the fan mode; emits fanModeChanged(arg) only when the value changed.
    void setFanMode(QString arg)
    {
        lock();
        if (m_fanMode != arg) {
            m_fanMode = arg;
            unlock();  // release before emit: avoids deadlock with direct-connected slots
            emit self->fanModeChanged(arg);
        } else {
            unlock();
        }
    }

    void lock() {
        m_mutex.lock();
    }

    void unlock() {
        m_mutex.unlock();
    }
};
// Constructs the zone and wires the pimpl's back-pointer so the private
// setters can emit this object's signals.
HvacZone::HvacZone(QObject *parent) :
    QObject(parent),
    p(new HvacZonePrivate)
{
    p->self = this;
}

// NOTE(review): 'p' is not deleted here. If 'p' is declared in the header as
// a raw pointer this leaks one HvacZonePrivate per zone; if it is a
// QScopedPointer/std::unique_ptr this empty out-of-line destructor is the
// usual pimpl idiom. Header not visible here — confirm the declaration.
HvacZone::~HvacZone()
{
}
// Acquires the zone's internal mutex. Exposed so callers can group several
// property reads into one consistent snapshot; must be paired with unlock().
void HvacZone::lock()
{
    p->lock();
}

// Releases the mutex acquired by lock().
void HvacZone::unlock()
{
    p->unlock();
}
// Property getters — thin forwards to the pimpl. The pimpl getters do not
// lock; wrap multi-property reads in lock()/unlock() for a consistent view.
QString HvacZone::name() const
{
    return p->name();
}

QString HvacZone::mode() const
{
    return p->mode();
}

QString HvacZone::fanMode() const
{
    return p->fanMode();
}
// Property setters — thin forwards to the pimpl, which takes the mutex and
// emits the corresponding *Changed(arg) signal only when the value changed.
void HvacZone::setName(QString arg)
{
    p->setName(arg);
}

void HvacZone::setMode(QString arg)
{
    p->setMode(arg);
}

void HvacZone::setFanMode(QString arg)
{
    p->setFanMode(arg);
}
|
CaptEmulation/hvac
|
hvac-service-lib/HvacZone.cpp
|
C++
|
apache-2.0
| 1,762
|
# Cookbook metadata for the "dependencies" cookbook — a simple drop-in
# replacement for users tied to the dependencies cookbook from AWS OpsWorks.
name "dependencies"
description "Simple drop-in replacement for those who are tied to dependencies cookbook from AWS Opsworks"
maintainer "TranceLove (airwave209gt@gmail.com)"
license "Apache 2.0"
version "0.0.1"

depends "packages"

# Node attributes consumed by the recipes.
attribute "dependencies/debs",
  display_name: "Debian packages to install",
  description: "A list of Debian packages (.deb) to install",
  required: false

attribute "dependencies/update_debs",
  display_name: "Update sources",
  description: "Update sources using apt-get update",
  required: false

attribute "dependencies/upgrade_debs",
  display_name: "Update packages",
  description: "Update packages using apt-get upgrade",
  required: false
|
TranceLove/chef-dependencies
|
metadata.rb
|
Ruby
|
apache-2.0
| 730
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Fri Dec 2 15:05:18 2011 by generateDS.py version 2.7b.
#
import sys
import getopt
import re as re_
etree_ = None
Verbose_import_ = False
( XMLParser_import_none, XMLParser_import_lxml,
XMLParser_import_elementtree
) = range(3)
XMLParser_import_library = None
try:
# lxml
from lxml import etree as etree_
XMLParser_import_library = XMLParser_import_lxml
if Verbose_import_:
print("running with lxml.etree")
except ImportError:
try:
# cElementTree from Python 2.5+
import xml.etree.cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree on Python 2.5+")
except ImportError:
try:
# ElementTree from Python 2.5+
import xml.etree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree on Python 2.5+")
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree")
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree")
except ImportError:
raise ImportError("Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
    """Parse an XML document with whichever ElementTree implementation was
    imported above, forwarding all arguments to ``etree_.parse``.

    When lxml is the active implementation and the caller did not supply an
    explicit ``parser``, an ``ETCompatXMLParser`` is used so comments are
    ignored, matching the behaviour of the stdlib ElementTree parsers.
    Returns the parsed document (an ElementTree object).
    """
    if (XMLParser_import_library == XMLParser_import_lxml and
        'parser' not in kwargs):
        # Use the lxml ElementTree compatible parser so that, e.g.,
        # we ignore comments.
        kwargs['parser'] = etree_.ETCompatXMLParser()
    doc = etree_.parse(*args, **kwargs)
    return doc
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError, exp:
class GeneratedsSuper(object):
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_integer_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of integers')
return input_data
def gds_format_float(self, input_data, input_name=''):
return '%f' % input_data
def gds_validate_float(self, input_data, node, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_float_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of floats')
return input_data
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_double_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
fvalue = float(value)
except (TypeError, ValueError), exp:
raise_parse_error(node, 'Requires sequence of doubles')
return input_data
def gds_format_boolean(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_boolean(self, input_data, node, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_boolean_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(node, 'Requires sequence of booleans ("true", "1", "false", "0")')
return input_data
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
return None
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Encoding applied when attribute/content strings are .encode()d on export.
ExternalEncoding = 'ascii'
# Splits an optional Clark-notation '{namespace}' prefix off a tag name.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
# Collapses runs of whitespace (newlines, CRs, spaces) for string cleanup.
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
# Captures namespace URI and local name from a Clark-notation name.
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
#
# Support/utility functions.
#
def showIndent(outfile, level):
    """Write four spaces of indentation per nesting level to outfile."""
    outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape '&', '<' and '>' in inStr for use as XML element content.

    Non-string values are first converted with '%s'; falsy input yields
    ''.  (The entity replacements had been mangled into no-op
    character-for-itself substitutions; restored here.)
    """
    if not inStr:
        return ''
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape inStr for use as an XML attribute value and wrap it in
    quotes, choosing single quotes when the value contains double
    quotes (and escaping double quotes as &quot; when it contains
    both kinds).  The entity replacements had been mangled into no-op
    substitutions (and a bare \"\"\" where &quot; belonged); restored.
    """
    s1 = (isinstance(inStr, basestring) and inStr or
          '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Render inStr as a Python string literal.

    Prefers single quotes; falls back to double quotes (escaping any
    embedded double quotes) when the text contains single quotes, and
    to triple quoting when the text spans multiple lines.
    """
    if "'" not in inStr:
        if '\n' in inStr:
            return "'''%s'''" % inStr
        return "'%s'" % inStr
    escaped = inStr
    if '"' in escaped:
        escaped = escaped.replace('"', '\\"')
    if '\n' in escaped:
        return '"""%s"""' % escaped
    return '"%s"' % escaped
def get_all_text_(node):
    """Return the flattened textual content of a mixed-content element:
    its leading text followed by the tail text of each child."""
    parts = []
    if node.text is not None:
        parts.append(node.text)
    for child in node:
        if child.tail is not None:
            parts.append(child.tail)
    return ''.join(parts)
def find_attr_value_(attr_name, node):
    """Look up an attribute value on node.

    A plain name is looked up directly; a 'prefix:name' form is
    resolved through node.nsmap (lxml only) into Clark notation.
    Returns None when the attribute is absent, the prefix is unmapped,
    or the name has more than one colon.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local = parts
        uri = node.nsmap.get(prefix)
        if uri is None:
            return None
        return attrs.get('{%s}%s' % (uri, local))
    return None
class GDSParseError(Exception):
    """Raised (via raise_parse_error) when parsed XML content fails
    validation, e.g. a malformed attribute or list value."""
    pass
def raise_parse_error(node, msg):
    """Raise GDSParseError for node, appending the element tag and --
    when lxml is in use and records it -- the source line number."""
    if XMLParser_import_library == XMLParser_import_lxml:
        detail = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
    else:
        detail = '%s (element %s)' % (msg, node.tag, )
    raise GDSParseError(detail)
class MixedContainer:
    """One piece of mixed XML content -- literal text, a simple typed
    value, or a nested complex element -- tagged with category and
    content-type constants so it can be exported appropriately."""
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    def getContenttype(self, content_type):
        # NOTE: the parameter is unused; kept for interface compatibility.
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace):
        if self.category == MixedContainer.CategoryText:
            # Skip whitespace-only text so empty lines are not exported.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else:    # category == MixedContainer.CategoryComplex
            self.value.export(outfile, level, namespace,name)
    def exportSimple(self, outfile, level, name):
        # Choose the printf conversion matching the declared content type;
        # unknown types write nothing (same as the original if/elif chain).
        fmt = None
        if self.content_type == MixedContainer.TypeString:
            fmt = '<%s>%s</%s>'
        elif self.content_type in (MixedContainer.TypeInteger,
                                   MixedContainer.TypeBoolean):
            fmt = '<%s>%d</%s>'
        elif self.content_type in (MixedContainer.TypeFloat,
                                   MixedContainer.TypeDecimal):
            fmt = '<%s>%f</%s>'
        elif self.content_type == MixedContainer.TypeDouble:
            fmt = '<%s>%g</%s>'
        if fmt is not None:
            outfile.write(fmt % (self.name, self.value, self.name))
    def exportLiteral(self, outfile, level, name):
        # Text and simple values serialize identically as a one-line
        # constructor call; complex values recurse into the child.
        if self.category in (MixedContainer.CategoryText,
                             MixedContainer.CategorySimple):
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s", "%s"),\n' % \
                (self.category, self.content_type, self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write('model_.MixedContainer(%d, %d, "%s",\n' % \
                (self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata record for one member of a generated class: its name,
    its schema data type (a single type or a chain of types), and
    whether the member is a container (list)."""
    def __init__(self, name='', data_type='', container=0):
        self.name = name
        self.data_type = data_type
        self.container = container
    def set_name(self, name):
        self.name = name
    def get_name(self):
        return self.name
    def set_data_type(self, data_type):
        self.data_type = data_type
    def get_data_type_chain(self):
        return self.data_type
    def get_data_type(self):
        # The effective type is the last entry of a type chain; an empty
        # chain falls back to 'xs:string'.
        if not isinstance(self.data_type, list):
            return self.data_type
        if self.data_type:
            return self.data_type[-1]
        return 'xs:string'
    def set_container(self, container):
        self.container = container
    def get_container(self):
        return self.container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class EnvelopeType(GeneratedsSuper):
    """Root OVF descriptor type"""
    # When user code sets subclass, factory() instantiates it instead of
    # EnvelopeType itself.
    subclass = None
    superclass = None
    def __init__(self, lang='en-US', References=None, Section=None, Content=None, Strings=None):
        self.lang = _cast(None, lang)
        self.References = References
        if Section is None:
            self.Section = []
        else:
            self.Section = Section
        self.Content = Content
        if Strings is None:
            self.Strings = []
        else:
            self.Strings = Strings
        # Catch-all for attributes not explicitly modelled by the schema.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        if EnvelopeType.subclass:
            return EnvelopeType.subclass(*args_, **kwargs_)
        else:
            return EnvelopeType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors -------------------------------------------
    def get_References(self): return self.References
    def set_References(self, References): self.References = References
    def get_Section(self): return self.Section
    def set_Section(self, Section): self.Section = Section
    def add_Section(self, value): self.Section.append(value)
    def insert_Section(self, index, value): self.Section[index] = value
    def get_Content(self): return self.Content
    def set_Content(self, Content): self.Content = Content
    def get_Strings(self): return self.Strings
    def set_Strings(self, Strings): self.Strings = Strings
    def add_Strings(self, value): self.Strings.append(value)
    def insert_Strings(self, index, value): self.Strings[index] = value
    def get_lang(self): return self.lang
    def set_lang(self, lang): self.lang = lang
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    # --- XML export ----------------------------------------------------
    def export(self, outfile, level, namespace_='ovf:', name_='EnvelopeType', namespacedef_=''):
        # Write this element as XML; self-closing when it has no children.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='EnvelopeType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='EnvelopeType'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute: re-emit with the xsi: prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            # The xml: namespace needs no declaration.
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Declare a synthetic yyyN prefix for any other
                            # namespace so the attribute stays qualified.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.lang is not None and 'lang' not in already_processed:
            already_processed.append('lang')
            outfile.write(' lang=%s' % (self.gds_format_string(quote_attrib(self.lang).encode(ExternalEncoding), input_name='lang'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='EnvelopeType', fromsubclass_=False):
        if self.References is not None:
            self.References.export(outfile, level, namespace_, name_='References', )
        for Section_ in self.Section:
            Section_.export(outfile, level, namespace_, name_='Section')
        if self.Content is not None:
            self.Content.export(outfile, level, namespace_, name_='Content', )
        for Strings_ in self.Strings:
            Strings_.export(outfile, level, namespace_, name_='Strings')
    def hasContent_(self):
        # True when any child element would be exported.
        if (
            self.References is not None or
            self.Section or
            self.Content is not None or
            self.Strings
            ):
            return True
        else:
            return False
    # --- Python-literal export -----------------------------------------
    def exportLiteral(self, outfile, level, name_='EnvelopeType'):
        # Write this instance as Python constructor-literal code.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.lang is not None and 'lang' not in already_processed:
            already_processed.append('lang')
            showIndent(outfile, level)
            outfile.write('lang = "%s",\n' % (self.lang,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.References is not None:
            showIndent(outfile, level)
            outfile.write('References=model_.References_Type(\n')
            self.References.exportLiteral(outfile, level, name_='References')
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('Section=[\n')
        level += 1
        for Section_ in self.Section:
            showIndent(outfile, level)
            outfile.write('model_.Section(\n')
            Section_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        if self.Content is not None:
            showIndent(outfile, level)
            outfile.write('Content=model_.Content(\n')
            self.Content.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('Strings=[\n')
        level += 1
        for Strings_ in self.Strings:
            showIndent(outfile, level)
            outfile.write('model_.Strings_Type(\n')
            Strings_.exportLiteral(outfile, level, name_='Strings_Type')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    # --- Building from a parsed tree -----------------------------------
    def build(self, node):
        # Populate this instance from an etree element.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('lang', node)
        if value is not None and 'lang' not in already_processed:
            already_processed.append('lang')
            self.lang = value
        # Unmodelled attributes are collected into the catch-all dict.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each child element to the matching member builder;
        # Section and Content honour xsi:type via get_class_obj_().
        if nodeName_ == 'References':
            obj_ = References_Type.factory()
            obj_.build(child_)
            self.set_References(obj_)
        elif nodeName_ == 'Section':
            class_obj_ = self.get_class_obj_(child_, Section_Type)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.Section.append(obj_)
        elif nodeName_ == 'Content':
            class_obj_ = self.get_class_obj_(child_, Content_Type)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_Content(obj_)
        elif nodeName_ == 'Strings':
            obj_ = Strings_Type.factory()
            obj_.build(child_)
            self.Strings.append(obj_)
# end class EnvelopeType
class References_Type(GeneratedsSuper):
    """Type for list of external resources"""
    subclass = None
    superclass = None
    def __init__(self, File=None, anytypeobjs_=None):
        if File is None:
            self.File = []
        else:
            self.File = File
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        # Catch-all for attributes not modelled by the schema.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        if References_Type.subclass:
            return References_Type.subclass(*args_, **kwargs_)
        else:
            return References_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- Generated accessors -------------------------------------------
    def get_File(self): return self.File
    def set_File(self, File): self.File = File
    def add_File(self, value): self.File.append(value)
    def insert_File(self, index, value): self.File[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fix: previously indexed self._anytypeobjs_, an attribute that is
    # never assigned, so every call raised AttributeError.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    # --- XML export ----------------------------------------------------
    def export(self, outfile, level, namespace_='ovf:', name_='References_Type', namespacedef_=''):
        # Write this element as XML; self-closing when it has no children.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='References_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='References_Type'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute: re-emit with the xsi: prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Declare a synthetic yyyN prefix for other namespaces.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='References_Type', fromsubclass_=False):
        for File_ in self.File:
            File_.export(outfile, level, namespace_, name_='File')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # True when any child element would be exported.
        if (
            self.File or
            self.anytypeobjs_
            ):
            return True
        else:
            return False
    # --- Python-literal export -----------------------------------------
    def exportLiteral(self, outfile, level, name_='References_Type'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('File=[\n')
        level += 1
        for File_ in self.File:
            showIndent(outfile, level)
            outfile.write('model_.File_Type(\n')
            File_.exportLiteral(outfile, level, name_='File_Type')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    # --- Building from a parsed tree -----------------------------------
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # All attributes go into the catch-all dict.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'File':
            obj_ = File_Type.factory()
            obj_.build(child_)
            self.File.append(obj_)
        else:
            # Anything else is treated as xs:any content.
            obj_ = self.gds_build_any(child_, 'References_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
# end class References_Type
class File_Type(GeneratedsSuper):
"""Type for an external reference to a resourceReference key used in
other parts of the packageLocation of external resourceSize in
bytes of the files (if known)Compression type (gzip, bzip2, or
none if empty or not specified)Chunk size (except for last
chunk)"""
subclass = None
superclass = None
def __init__(self, compression='', href=None, chunkSize=None, id=None, size=None, anytypeobjs_=None):
self.compression = _cast(None, compression)
self.href = _cast(None, href)
self.chunkSize = _cast(int, chunkSize)
self.id = _cast(None, id)
self.size = _cast(int, size)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if File_Type.subclass:
return File_Type.subclass(*args_, **kwargs_)
else:
return File_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_compression(self): return self.compression
def set_compression(self, compression): self.compression = compression
def get_href(self): return self.href
def set_href(self, href): self.href = href
def get_chunkSize(self): return self.chunkSize
def set_chunkSize(self, chunkSize): self.chunkSize = chunkSize
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_size(self): return self.size
def set_size(self, size): self.size = size
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='File_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='File_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='File_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.compression is not None and 'compression' not in already_processed:
already_processed.append('compression')
outfile.write(' compression=%s' % (self.gds_format_string(quote_attrib(self.compression).encode(ExternalEncoding), input_name='compression'), ))
if self.href is not None and 'href' not in already_processed:
already_processed.append('href')
outfile.write(' href=%s' % (self.gds_format_string(quote_attrib(self.href).encode(ExternalEncoding), input_name='href'), ))
if self.chunkSize is not None and 'chunkSize' not in already_processed:
already_processed.append('chunkSize')
outfile.write(' chunkSize="%s"' % self.gds_format_integer(self.chunkSize, input_name='chunkSize'))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
if self.size is not None and 'size' not in already_processed:
already_processed.append('size')
outfile.write(' size="%s"' % self.gds_format_integer(self.size, input_name='size'))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='File_Type', fromsubclass_=False):
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.anytypeobjs_
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='File_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.compression is not None and 'compression' not in already_processed:
already_processed.append('compression')
showIndent(outfile, level)
outfile.write('compression = "%s",\n' % (self.compression,))
if self.href is not None and 'href' not in already_processed:
already_processed.append('href')
showIndent(outfile, level)
outfile.write('href = "%s",\n' % (self.href,))
if self.chunkSize is not None and 'chunkSize' not in already_processed:
already_processed.append('chunkSize')
showIndent(outfile, level)
outfile.write('chunkSize = %d,\n' % (self.chunkSize,))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = "%s",\n' % (self.id,))
if self.size is not None and 'size' not in already_processed:
already_processed.append('size')
showIndent(outfile, level)
outfile.write('size = %d,\n' % (self.size,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('compression', node)
if value is not None and 'compression' not in already_processed:
already_processed.append('compression')
self.compression = value
value = find_attr_value_('href', node)
if value is not None and 'href' not in already_processed:
already_processed.append('href')
self.href = value
value = find_attr_value_('chunkSize', node)
if value is not None and 'chunkSize' not in already_processed:
already_processed.append('chunkSize')
try:
self.chunkSize = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
self.id = value
value = find_attr_value_('size', node)
if value is not None and 'size' not in already_processed:
already_processed.append('size')
try:
self.size = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_ = self.gds_build_any(child_, 'File_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class File_Type
class Content_Type(GeneratedsSuper):
"""Base class for content"""
subclass = None
superclass = None
def __init__(self, id=None, Info=None, Name=None, Section=None, extensiontype_=None):
self.id = _cast(None, id)
self.Info = Info
self.Name = Name
if Section is None:
self.Section = []
else:
self.Section = Section
self.anyAttributes_ = {}
self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if Content_Type.subclass:
return Content_Type.subclass(*args_, **kwargs_)
else:
return Content_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Info(self): return self.Info
def set_Info(self, Info): self.Info = Info
def get_Name(self): return self.Name
def set_Name(self, Name): self.Name = Name
def get_Section(self): return self.Section
def set_Section(self, Section): self.Section = Section
def add_Section(self, value): self.Section.append(value)
def insert_Section(self, index, value): self.Section[index] = value
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
def export(self, outfile, level, namespace_='ovf:', name_='Content_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='Content_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Content_Type'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
outfile.write(' xsi:type="%s"' % self.extensiontype_)
def exportChildren(self, outfile, level, namespace_='ovf:', name_='Content_Type', fromsubclass_=False):
if self.Info is not None:
self.Info.export(outfile, level, namespace_, name_='Info', )
if self.Name is not None:
self.Name.export(outfile, level, namespace_, name_='Name')
for Section_ in self.Section:
Section_.export(outfile, level, namespace_, name_='Section')
def hasContent_(self):
if (
self.Info is not None or
self.Name is not None or
self.Section
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='Content_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = "%s",\n' % (self.id,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the child members (Info, Name, Section list) as Python
        constructor-literal source, one 'name=value,' entry per child."""
        if self.Info is not None:
            showIndent(outfile, level)
            # Info is rendered as a nested Msg_Type constructor call.
            outfile.write('Info=model_.Msg_Type(\n')
            self.Info.exportLiteral(outfile, level, name_='Info')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Name is not None:
            showIndent(outfile, level)
            outfile.write('Name=model_.Msg_Type(\n')
            self.Name.exportLiteral(outfile, level, name_='Name')
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        # The Section list literal is always emitted, even when empty.
        outfile.write('Section=[\n')
        level += 1
        for Section_ in self.Section:
            showIndent(outfile, level)
            outfile.write('model_.Section(\n')
            Section_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
self.id = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Info':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Info(obj_)
elif nodeName_ == 'Name':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Name(obj_)
elif nodeName_ == 'Section':
class_obj_ = self.get_class_obj_(child_, Section_Type)
obj_ = class_obj_.factory()
obj_.build(child_)
self.Section.append(obj_)
# end class Content_Type
class VirtualSystem_Type(Content_Type):
    """Content describing a virtual system.

    Adds nothing of its own; every operation delegates to Content_Type."""
    subclass = None
    superclass = Content_Type
    def __init__(self, id=None, Info=None, Name=None, Section=None):
        super(VirtualSystem_Type, self).__init__(id, Info, Name, Section, )
    def factory(*args_, **kwargs_):
        # Honor a registered subclass override when one is installed.
        target_class = VirtualSystem_Type.subclass or VirtualSystem_Type
        return target_class(*args_, **kwargs_)
    factory = staticmethod(factory)
    def export(self, outfile, level, namespace_='ovf:', name_='VirtualSystem_Type', namespacedef_=''):
        """Serialize this element as XML; self-closing when it has no children."""
        showIndent(outfile, level)
        extra_ns = ' ' + namespacedef_ if namespacedef_ else ''
        outfile.write('<%s%s%s' % (namespace_, name_, extra_ns, ))
        self.exportAttributes(outfile, level, [], namespace_, name_='VirtualSystem_Type')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        showIndent(outfile, level)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VirtualSystem_Type'):
        # No attributes of its own; all come from Content_Type.
        super(VirtualSystem_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualSystem_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='VirtualSystem_Type', fromsubclass_=False):
        super(VirtualSystem_Type, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        """True when the inherited Content_Type members hold any content."""
        return super(VirtualSystem_Type, self).hasContent_()
    def exportLiteral(self, outfile, level, name_='VirtualSystem_Type'):
        """Emit this element as constructor-style Python literal source."""
        nested = level + 1
        self.exportLiteralAttributes(outfile, nested, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, nested, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(VirtualSystem_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(VirtualSystem_Type, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for element in node:
            local_tag = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, local_tag)
    def buildAttributes(self, node, attrs, already_processed):
        super(VirtualSystem_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(VirtualSystem_Type, self).buildChildren(child_, node, nodeName_, True)
# end class VirtualSystem_Type
class VirtualSystemCollection_Type(Content_Type):
    """A collection of Content."""
    subclass = None
    superclass = Content_Type
    def __init__(self, id=None, Info=None, Name=None, Section=None, Content=None):
        super(VirtualSystemCollection_Type, self).__init__(id, Info, Name, Section, )
        # Content holds the nested Content_Type children of this collection.
        if Content is None:
            self.Content = []
        else:
            self.Content = Content
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass override when one is installed.
        if VirtualSystemCollection_Type.subclass:
            return VirtualSystemCollection_Type.subclass(*args_, **kwargs_)
        else:
            return VirtualSystemCollection_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessors for the Content child list.
    def get_Content(self): return self.Content
    def set_Content(self, Content): self.Content = Content
    def add_Content(self, value): self.Content.append(value)
    def insert_Content(self, index, value): self.Content[index] = value
    def export(self, outfile, level, namespace_='ovf:', name_='VirtualSystemCollection_Type', namespacedef_=''):
        """Serialize this element as XML; self-closing when it has no children."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualSystemCollection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VirtualSystemCollection_Type'):
        # No attributes of its own; all come from Content_Type.
        super(VirtualSystemCollection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualSystemCollection_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='VirtualSystemCollection_Type', fromsubclass_=False):
        # Base-class children (Info, Name, Section) first, then nested Content.
        super(VirtualSystemCollection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        for Content_ in self.Content:
            Content_.export(outfile, level, namespace_, name_='Content')
    def hasContent_(self):
        """Return True when there is any child content to serialize."""
        if (
            self.Content or
            super(VirtualSystemCollection_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='VirtualSystemCollection_Type'):
        """Emit this element as constructor-style Python literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(VirtualSystemCollection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(VirtualSystemCollection_Type, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        # The Content list literal is always emitted, even when empty.
        outfile.write('Content=[\n')
        level += 1
        for Content_ in self.Content:
            showIndent(outfile, level)
            outfile.write('model_.Content(\n')
            Content_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(VirtualSystemCollection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Content':
            # Content children are polymorphic; resolve the concrete class
            # via the generated helper before instantiating.
            class_obj_ = self.get_class_obj_(child_, Content_Type)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.Content.append(obj_)
        super(VirtualSystemCollection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class VirtualSystemCollection_Type
class Strings_Type(GeneratedsSuper):
    """Type for string resource bundleLocale for this string resource
    bundleReference to external resource bundle"""
    subclass = None
    superclass = None
    def __init__(self, lang=None, fileRef=None, Msg=None):
        # lang and fileRef are plain string attributes; Msg is the list of
        # localized message children.
        self.lang = _cast(None, lang)
        self.fileRef = _cast(None, fileRef)
        if Msg is None:
            self.Msg = []
        else:
            self.Msg = Msg
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass override when one is installed.
        if Strings_Type.subclass:
            return Strings_Type.subclass(*args_, **kwargs_)
        else:
            return Strings_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessors for the Msg child list and the declared attributes.
    def get_Msg(self): return self.Msg
    def set_Msg(self, Msg): self.Msg = Msg
    def add_Msg(self, value): self.Msg.append(value)
    def insert_Msg(self, index, value): self.Msg[index] = value
    def get_lang(self): return self.lang
    def set_lang(self, lang): self.lang = lang
    def get_fileRef(self): return self.fileRef
    def set_fileRef(self, fileRef): self.fileRef = fileRef
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='Strings_Type', namespacedef_=''):
        """Serialize this element as XML; self-closing when it has no Msg children."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Strings_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Strings_Type'):
        """Write wildcard attributes first, then the declared lang/fileRef."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            # Wildcard attribute names may arrive in Clark notation ("{uri}local").
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # xsi-namespaced attributes are re-emitted with the "xsi:" prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Other namespaces get a synthetic "yyyN" prefix,
                            # declared inline on this element.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.lang is not None and 'lang' not in already_processed:
            already_processed.append('lang')
            outfile.write(' lang=%s' % (self.gds_format_string(quote_attrib(self.lang).encode(ExternalEncoding), input_name='lang'), ))
        if self.fileRef is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            outfile.write(' fileRef=%s' % (self.gds_format_string(quote_attrib(self.fileRef).encode(ExternalEncoding), input_name='fileRef'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='Strings_Type', fromsubclass_=False):
        for Msg_ in self.Msg:
            Msg_.export(outfile, level, namespace_, name_='Msg')
    def hasContent_(self):
        """Return True when there is at least one Msg child."""
        if (
            self.Msg
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Strings_Type'):
        """Emit this element as constructor-style Python literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.lang is not None and 'lang' not in already_processed:
            already_processed.append('lang')
            showIndent(outfile, level)
            outfile.write('lang = "%s",\n' % (self.lang,))
        if self.fileRef is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            showIndent(outfile, level)
            outfile.write('fileRef = "%s",\n' % (self.fileRef,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        # The Msg list literal is always emitted, even when empty.
        showIndent(outfile, level)
        outfile.write('Msg=[\n')
        level += 1
        for Msg_ in self.Msg:
            showIndent(outfile, level)
            outfile.write('model_.MsgType(\n')
            Msg_.exportLiteral(outfile, level, name_='MsgType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('lang', node)
        if value is not None and 'lang' not in already_processed:
            already_processed.append('lang')
            self.lang = value
        value = find_attr_value_('fileRef', node)
        if value is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            self.fileRef = value
        # Remaining attributes are collected into the wildcard map.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Msg':
            obj_ = MsgType.factory()
            obj_.build(child_)
            self.Msg.append(obj_)
# end class Strings_Type
class Section_Type(GeneratedsSuper):
    """Base type for Sections, subclassing this is the most common form of
    extensibility. Subtypes define more specific elements."""
    subclass = None
    superclass = None
    def __init__(self, required=None, Info=None, extensiontype_=None):
        # required: plain string attribute; Info: localizable message child;
        # extensiontype_: xsi:type value when a subtype is serialized.
        self.required = _cast(None, required)
        self.Info = Info
        self.anyAttributes_ = {}
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass override when one is installed.
        if Section_Type.subclass:
            return Section_Type.subclass(*args_, **kwargs_)
        else:
            return Section_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessors for the Info child and the declared attributes.
    def get_Info(self): return self.Info
    def set_Info(self, Info): self.Info = Info
    def get_required(self): return self.required
    def set_required(self, required): self.required = required
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='ovf:', name_='Section_Type', namespacedef_=''):
        """Serialize this element as XML; self-closing when it has no Info child."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Section_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Section_Type'):
        """Write wildcard attributes, the declared 'required', then xsi:type."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            # Wildcard attribute names may arrive in Clark notation ("{uri}local").
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # xsi-namespaced attributes are re-emitted with the "xsi:" prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Other namespaces get a synthetic "yyyN" prefix,
                            # declared inline on this element.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.required is not None and 'required' not in already_processed:
            already_processed.append('required')
            outfile.write(' required=%s' % (self.gds_format_string(quote_attrib(self.required).encode(ExternalEncoding), input_name='required'), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            # The xsi namespace is declared inline alongside the type override.
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='Section_Type', fromsubclass_=False):
        if self.Info is not None:
            self.Info.export(outfile, level, namespace_, name_='Info', )
    def hasContent_(self):
        """Return True when the Info child is present."""
        if (
            self.Info is not None
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Section_Type'):
        """Emit this element as constructor-style Python literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.required is not None and 'required' not in already_processed:
            already_processed.append('required')
            showIndent(outfile, level)
            outfile.write('required = "%s",\n' % (self.required,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Info is not None:
            showIndent(outfile, level)
            # Info is rendered as a nested Msg_Type constructor call.
            outfile.write('Info=model_.Msg_Type(\n')
            self.Info.exportLiteral(outfile, level, name_='Info')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('required', node)
        if value is not None and 'required' not in already_processed:
            already_processed.append('required')
            self.required = value
        # Remaining attributes are collected into the wildcard map.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Info':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_Info(obj_)
# end class Section_Type
class Msg_Type(GeneratedsSuper):
    """Type for localizable stringDefault string valueIdentifier for lookup
    in string resource bundle for alternate locale"""
    subclass = None
    superclass = None
    def __init__(self, msgid='', valueOf_=None):
        # msgid: lookup key into the string resource bundle;
        # valueOf_: the element's text content (the default string value).
        self.msgid = _cast(None, msgid)
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass override when one is installed.
        if Msg_Type.subclass:
            return Msg_Type.subclass(*args_, **kwargs_)
        else:
            return Msg_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessors for the text value and the declared attribute.
    def get_msgid(self): return self.msgid
    def set_msgid(self, msgid): self.msgid = msgid
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='Msg_Type', namespacedef_=''):
        """Serialize this element as XML. The text value is written inline
        (no newline after '>') because this is a text-content element."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Msg_Type')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Msg_Type'):
        """Write wildcard attributes first, then the declared msgid."""
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            # Wildcard attribute names may arrive in Clark notation ("{uri}local").
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # xsi-namespaced attributes are re-emitted with the "xsi:" prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Other namespaces get a synthetic "yyyN" prefix,
                            # declared inline on this element.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.msgid is not None and 'msgid' not in already_processed:
            already_processed.append('msgid')
            outfile.write(' msgid=%s' % (self.gds_format_string(quote_attrib(self.msgid).encode(ExternalEncoding), input_name='msgid'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='Msg_Type', fromsubclass_=False):
        # Text-only element: no child elements to write.
        pass
    def hasContent_(self):
        """Return True when the text value is non-empty (truthiness test:
        an empty string counts as no content)."""
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Msg_Type'):
        """Emit this element as constructor-style Python literal source,
        including the text value when present."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
            showIndent(outfile, level)
            outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.msgid is not None and 'msgid' not in already_processed:
            already_processed.append('msgid')
            showIndent(outfile, level)
            outfile.write('msgid = "%s",\n' % (self.msgid,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        # Text-only element: no child elements to write.
        pass
    def build(self, node):
        """Populate this object from an ElementTree node, capturing its text."""
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('msgid', node)
        if value is not None and 'msgid' not in already_processed:
            already_processed.append('msgid')
            self.msgid = value
        # Remaining attributes are collected into the wildcard map.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class Msg_Type
class AnnotationSection_Type(Section_Type):
    """User defined annotation"""
    subclass = None
    superclass = Section_Type
    def __init__(self, required=None, Info=None, Annotation=None, anytypeobjs_=None):
        super(AnnotationSection_Type, self).__init__(required, Info, )
        # Annotation: the user-defined message child; anytypeobjs_: wildcard
        # (##any) child elements not matching a declared member.
        self.Annotation = Annotation
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass override when one is installed.
        if AnnotationSection_Type.subclass:
            return AnnotationSection_Type.subclass(*args_, **kwargs_)
        else:
            return AnnotationSection_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Accessors for the Annotation child and the wildcard child list.
    def get_Annotation(self): return self.Annotation
    def set_Annotation(self, Annotation): self.Annotation = Annotation
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Bug fix: referenced nonexistent "self._anytypeobjs_" (leading underscore),
    # which raised AttributeError on every call; the attribute is "anytypeobjs_".
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def export(self, outfile, level, namespace_='ovf:', name_='AnnotationSection_Type', namespacedef_=''):
        """Serialize this element as XML; self-closing when it has no children."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='AnnotationSection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='AnnotationSection_Type'):
        # No attributes of its own; all come from Section_Type.
        super(AnnotationSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='AnnotationSection_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='AnnotationSection_Type', fromsubclass_=False):
        # Base-class children (Info) first, then Annotation, then wildcard children.
        super(AnnotationSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.Annotation is not None:
            self.Annotation.export(outfile, level, namespace_, name_='Annotation', )
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        """Return True when there is any child content to serialize."""
        if (
            self.Annotation is not None or
            self.anytypeobjs_ or
            super(AnnotationSection_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='AnnotationSection_Type'):
        """Emit this element as constructor-style Python literal source."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(AnnotationSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(AnnotationSection_Type, self).exportLiteralChildren(outfile, level, name_)
        if self.Annotation is not None:
            showIndent(outfile, level)
            # Annotation is rendered as a nested Msg_Type constructor call.
            outfile.write('Annotation=model_.Msg_Type(\n')
            self.Annotation.exportLiteral(outfile, level, name_='Annotation')
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        # The wildcard-children literal is always emitted, even when empty.
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(AnnotationSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Annotation':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_Annotation(obj_)
        else:
            # Unrecognized elements are kept as wildcard children when the
            # generated helper can build them.
            obj_ = self.gds_build_any(child_, 'AnnotationSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
        super(AnnotationSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class AnnotationSection_Type
class ProductSection_Type(Section_Type):
"""Product information for a virtual applianceProperties for
application-level customizationProperty identifier
prefixProperty identifier suffix"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, instance='', classxx='', Product=None, Vendor=None, Version=None, FullVersion=None, ProductUrl=None, VendorUrl=None, AppUrl=None, Icon=None, Category=None, Property=None, anytypeobjs_=None):
super(ProductSection_Type, self).__init__(required, Info, )
self.instance = _cast(None, instance)
self.classxx = _cast(None, classxx)
self.Product = Product
self.Vendor = Vendor
self.Version = Version
self.FullVersion = FullVersion
self.ProductUrl = ProductUrl
self.VendorUrl = VendorUrl
self.AppUrl = AppUrl
if Icon is None:
self.Icon = []
else:
self.Icon = Icon
if Category is None:
self.Category = []
else:
self.Category = Category
if Property is None:
self.Property = []
else:
self.Property = Property
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if ProductSection_Type.subclass:
return ProductSection_Type.subclass(*args_, **kwargs_)
else:
return ProductSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Product(self): return self.Product
def set_Product(self, Product): self.Product = Product
def get_Vendor(self): return self.Vendor
def set_Vendor(self, Vendor): self.Vendor = Vendor
def get_Version(self): return self.Version
def set_Version(self, Version): self.Version = Version
def get_FullVersion(self): return self.FullVersion
def set_FullVersion(self, FullVersion): self.FullVersion = FullVersion
def get_ProductUrl(self): return self.ProductUrl
def set_ProductUrl(self, ProductUrl): self.ProductUrl = ProductUrl
def get_VendorUrl(self): return self.VendorUrl
def set_VendorUrl(self, VendorUrl): self.VendorUrl = VendorUrl
def get_AppUrl(self): return self.AppUrl
def set_AppUrl(self, AppUrl): self.AppUrl = AppUrl
def get_Icon(self): return self.Icon
def set_Icon(self, Icon): self.Icon = Icon
def add_Icon(self, value): self.Icon.append(value)
def insert_Icon(self, index, value): self.Icon[index] = value
def get_Category(self): return self.Category
def set_Category(self, Category): self.Category = Category
def add_Category(self, value): self.Category.append(value)
def insert_Category(self, index, value): self.Category[index] = value
def get_Property(self): return self.Property
def set_Property(self, Property): self.Property = Property
def add_Property(self, value): self.Property.append(value)
def insert_Property(self, index, value): self.Property[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_instance(self): return self.instance
def set_instance(self, instance): self.instance = instance
def get_class(self): return self.classxx
def set_class(self, classxx): self.classxx = classxx
def export(self, outfile, level, namespace_='ovf:', name_='ProductSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='ProductSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ProductSection_Type'):
super(ProductSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ProductSection_Type')
if self.instance is not None and 'instance' not in already_processed:
already_processed.append('instance')
outfile.write(' instance=%s' % (self.gds_format_string(quote_attrib(self.instance).encode(ExternalEncoding), input_name='instance'), ))
if self.classxx is not None and 'classxx' not in already_processed:
already_processed.append('classxx')
outfile.write(' class=%s' % (self.gds_format_string(quote_attrib(self.classxx).encode(ExternalEncoding), input_name='class'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='ProductSection_Type', fromsubclass_=False):
        # Emit inherited children first, then each child element in schema
        # order; wildcard (xs:any) children come last.  Order matters for
        # schema validity, so do not reorder these writes.
        super(ProductSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.Product is not None:
            self.Product.export(outfile, level, namespace_, name_='Product')
        if self.Vendor is not None:
            self.Vendor.export(outfile, level, namespace_, name_='Vendor')
        if self.Version is not None:
            self.Version.export(outfile, level, namespace_, name_='Version')
        if self.FullVersion is not None:
            self.FullVersion.export(outfile, level, namespace_, name_='FullVersion')
        if self.ProductUrl is not None:
            self.ProductUrl.export(outfile, level, namespace_, name_='ProductUrl')
        if self.VendorUrl is not None:
            self.VendorUrl.export(outfile, level, namespace_, name_='VendorUrl')
        if self.AppUrl is not None:
            self.AppUrl.export(outfile, level, namespace_, name_='AppUrl')
        for Icon_ in self.Icon:
            Icon_.export(outfile, level, namespace_, name_='Icon')
        for Category_ in self.Category:
            Category_.export(outfile, level, namespace_, name_='Category')
        for Property_ in self.Property:
            Property_.export(outfile, level, namespace_, name_='Property')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Product is not None or
self.Vendor is not None or
self.Version is not None or
self.FullVersion is not None or
self.ProductUrl is not None or
self.VendorUrl is not None or
self.AppUrl is not None or
self.Icon or
self.Category or
self.Property or
self.anytypeobjs_ or
super(ProductSection_Type, self).hasContent_()
):
return True
else:
return False
    def exportLiteral(self, outfile, level, name_='ProductSection_Type'):
        # Write this object as nested Python constructor literals
        # (generateDS "literal" output mode).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # Own attributes first, then the superclass's; already_processed
        # prevents duplicates.
        if self.instance is not None and 'instance' not in already_processed:
            already_processed.append('instance')
            showIndent(outfile, level)
            outfile.write('instance = "%s",\n' % (self.instance,))
        if self.classxx is not None and 'classxx' not in already_processed:
            already_processed.append('classxx')
            showIndent(outfile, level)
            outfile.write('classxx = "%s",\n' % (self.classxx,))
        super(ProductSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        # Write children as Python constructor-literal text.  Each optional
        # single child becomes a keyword argument; each repeated child
        # becomes a list literal.  The emitted text's exact shape (indent,
        # commas, brackets) is load-bearing, so statement order must be
        # preserved.
        super(ProductSection_Type, self).exportLiteralChildren(outfile, level, name_)
        if self.Product is not None:
            showIndent(outfile, level)
            outfile.write('Product=model_.Msg_Type(\n')
            self.Product.exportLiteral(outfile, level, name_='Product')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Vendor is not None:
            showIndent(outfile, level)
            outfile.write('Vendor=model_.Msg_Type(\n')
            self.Vendor.exportLiteral(outfile, level, name_='Vendor')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Version is not None:
            showIndent(outfile, level)
            outfile.write('Version=model_.cimString(\n')
            self.Version.exportLiteral(outfile, level, name_='Version')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.FullVersion is not None:
            showIndent(outfile, level)
            outfile.write('FullVersion=model_.cimString(\n')
            self.FullVersion.exportLiteral(outfile, level, name_='FullVersion')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.ProductUrl is not None:
            showIndent(outfile, level)
            outfile.write('ProductUrl=model_.cimString(\n')
            self.ProductUrl.exportLiteral(outfile, level, name_='ProductUrl')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.VendorUrl is not None:
            showIndent(outfile, level)
            outfile.write('VendorUrl=model_.cimString(\n')
            self.VendorUrl.exportLiteral(outfile, level, name_='VendorUrl')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.AppUrl is not None:
            showIndent(outfile, level)
            outfile.write('AppUrl=model_.cimString(\n')
            self.AppUrl.exportLiteral(outfile, level, name_='AppUrl')
            showIndent(outfile, level)
            outfile.write('),\n')
        # Repeated children: Icon, Category, Property, then wildcards.
        showIndent(outfile, level)
        outfile.write('Icon=[\n')
        level += 1
        for Icon_ in self.Icon:
            showIndent(outfile, level)
            outfile.write('model_.IconType(\n')
            Icon_.exportLiteral(outfile, level, name_='IconType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('Category=[\n')
        level += 1
        for Category_ in self.Category:
            showIndent(outfile, level)
            outfile.write('model_.Msg_Type(\n')
            Category_.exportLiteral(outfile, level, name_='Msg_Type')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('Property=[\n')
        level += 1
        for Property_ in self.Property:
            showIndent(outfile, level)
            outfile.write('model_.PropertyType(\n')
            Property_.exportLiteral(outfile, level, name_='PropertyType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Consume this type's own attributes, then let the superclass
        # process the remainder.
        value = find_attr_value_('instance', node)
        if value is not None and 'instance' not in already_processed:
            already_processed.append('instance')
            self.instance = value
        # 'class' (reserved word in Python) is stored as 'classxx'.
        value = find_attr_value_('class', node)
        if value is not None and 'class' not in already_processed:
            already_processed.append('class')
            self.classxx = value
        super(ProductSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch one child element by local tag name to the matching
        # child-type factory; unknown tags go into the wildcard bucket.
        if nodeName_ == 'Product':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_Product(obj_)
        elif nodeName_ == 'Vendor':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_Vendor(obj_)
        elif nodeName_ == 'Version':
            # get_class_obj_ honors an xsi:type override, falling back to
            # cimString.
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_Version(obj_)
        elif nodeName_ == 'FullVersion':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_FullVersion(obj_)
        elif nodeName_ == 'ProductUrl':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_ProductUrl(obj_)
        elif nodeName_ == 'VendorUrl':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_VendorUrl(obj_)
        elif nodeName_ == 'AppUrl':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_AppUrl(obj_)
        elif nodeName_ == 'Icon':
            obj_ = IconType.factory()
            obj_.build(child_)
            self.Icon.append(obj_)
        elif nodeName_ == 'Category':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.Category.append(obj_)
        elif nodeName_ == 'Property':
            obj_ = PropertyType.factory()
            obj_.build(child_)
            self.Property.append(obj_)
        else:
            # Wildcard (xs:any) content; may still be handled by the
            # superclass below.
            obj_ = self.gds_build_any(child_, 'ProductSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
            super(ProductSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class ProductSection_Type
class PropertyConfigurationValue_Type(GeneratedsSuper):
    """Type for alternative default values for properties when
    DeploymentOptionSection is used.

    The 'value' attribute holds the alternative default property value;
    'configuration' names the configuration from DeploymentOptionSection
    in which this value is the default."""
    subclass = None
    superclass = None
    def __init__(self, configuration=None, value=None, anytypeobjs_=None):
        self.configuration = _cast(None, configuration)
        self.value = _cast(None, value)
        # Use a fresh list per instance so instances never share state.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if PropertyConfigurationValue_Type.subclass:
            return PropertyConfigurationValue_Type.subclass(*args_, **kwargs_)
        else:
            return PropertyConfigurationValue_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fix: this referenced the never-created attribute self._anytypeobjs_
    # (leading underscore) and therefore always raised AttributeError.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_configuration(self): return self.configuration
    def set_configuration(self, configuration): self.configuration = configuration
    def get_value(self): return self.value
    def set_value(self, value): self.value = value
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='PropertyConfigurationValue_Type', namespacedef_=''):
        # Serialize as XML; a self-closing tag is written when there is no
        # child content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='PropertyConfigurationValue_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='PropertyConfigurationValue_Type'):
        # Wildcard (xs:anyAttribute) attributes first: re-qualify xsi:*
        # attributes, re-declare foreign namespaces with synthetic
        # xmlns:yyyN prefixes, and pass plain attributes through; then the
        # declared 'configuration' and 'value' attributes.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.configuration is not None and 'configuration' not in already_processed:
            already_processed.append('configuration')
            outfile.write(' configuration=%s' % (self.gds_format_string(quote_attrib(self.configuration).encode(ExternalEncoding), input_name='configuration'), ))
        if self.value is not None and 'value' not in already_processed:
            already_processed.append('value')
            outfile.write(' value=%s' % (self.gds_format_string(quote_attrib(self.value).encode(ExternalEncoding), input_name='value'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='PropertyConfigurationValue_Type', fromsubclass_=False):
        # Only wildcard children exist for this type.
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        if (
            self.anytypeobjs_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='PropertyConfigurationValue_Type'):
        # Write this object as nested Python constructor literals.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.configuration is not None and 'configuration' not in already_processed:
            already_processed.append('configuration')
            showIndent(outfile, level)
            outfile.write('configuration = "%s",\n' % (self.configuration,))
        if self.value is not None and 'value' not in already_processed:
            already_processed.append('value')
            showIndent(outfile, level)
            outfile.write('value = "%s",\n' % (self.value,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate this object from an ElementTree/lxml element node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('configuration', node)
        if value is not None and 'configuration' not in already_processed:
            already_processed.append('configuration')
            self.configuration = value
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.append('value')
            self.value = value
        # Everything not consumed above lands in the xs:anyAttribute bucket.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # All children are wildcard (xs:any) content.
        obj_ = self.gds_build_any(child_, 'PropertyConfigurationValue_Type')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
# end class PropertyConfigurationValue_Type
class NetworkSection_Type(Section_Type):
    """Descriptions of logical networks used within the package."""
    subclass = None
    superclass = Section_Type
    def __init__(self, required=None, Info=None, Network=None, anytypeobjs_=None):
        super(NetworkSection_Type, self).__init__(required, Info, )
        # Use fresh lists per instance so instances never share state.
        if Network is None:
            self.Network = []
        else:
            self.Network = Network
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if NetworkSection_Type.subclass:
            return NetworkSection_Type.subclass(*args_, **kwargs_)
        else:
            return NetworkSection_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Network(self): return self.Network
    def set_Network(self, Network): self.Network = Network
    def add_Network(self, value): self.Network.append(value)
    def insert_Network(self, index, value): self.Network[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fix: this referenced the never-created attribute self._anytypeobjs_
    # (leading underscore) and therefore always raised AttributeError.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def export(self, outfile, level, namespace_='ovf:', name_='NetworkSection_Type', namespacedef_=''):
        # Serialize as XML; a self-closing tag is written when there is no
        # child content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='NetworkSection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='NetworkSection_Type'):
        super(NetworkSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='NetworkSection_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='NetworkSection_Type', fromsubclass_=False):
        # Inherited children first, then Network elements, then wildcards.
        super(NetworkSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        for Network_ in self.Network:
            Network_.export(outfile, level, namespace_, name_='Network')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        if (
            self.Network or
            self.anytypeobjs_ or
            super(NetworkSection_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='NetworkSection_Type'):
        # Write this object as nested Python constructor literals.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(NetworkSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(NetworkSection_Type, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('Network=[\n')
        level += 1
        for Network_ in self.Network:
            showIndent(outfile, level)
            outfile.write('model_.NetworkType(\n')
            Network_.exportLiteral(outfile, level, name_='NetworkType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate this object from an ElementTree/lxml element node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(NetworkSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Network':
            obj_ = NetworkType.factory()
            obj_.build(child_)
            self.Network.append(obj_)
        else:
            # Wildcard (xs:any) content; may still be handled by the
            # superclass below.
            obj_ = self.gds_build_any(child_, 'NetworkSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
            super(NetworkSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class NetworkSection_Type
class DiskSection_Type(Section_Type):
    """Descriptions of virtual disks used within the package."""
    subclass = None
    superclass = Section_Type
    def __init__(self, required=None, Info=None, Disk=None, anytypeobjs_=None):
        super(DiskSection_Type, self).__init__(required, Info, )
        # Use fresh lists per instance so instances never share state.
        if Disk is None:
            self.Disk = []
        else:
            self.Disk = Disk
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if DiskSection_Type.subclass:
            return DiskSection_Type.subclass(*args_, **kwargs_)
        else:
            return DiskSection_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Disk(self): return self.Disk
    def set_Disk(self, Disk): self.Disk = Disk
    def add_Disk(self, value): self.Disk.append(value)
    def insert_Disk(self, index, value): self.Disk[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fix: this referenced the never-created attribute self._anytypeobjs_
    # (leading underscore) and therefore always raised AttributeError.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def export(self, outfile, level, namespace_='ovf:', name_='DiskSection_Type', namespacedef_=''):
        # Serialize as XML; a self-closing tag is written when there is no
        # child content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='DiskSection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='DiskSection_Type'):
        super(DiskSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='DiskSection_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='DiskSection_Type', fromsubclass_=False):
        # Inherited children first, then Disk elements, then wildcards.
        super(DiskSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        for Disk_ in self.Disk:
            Disk_.export(outfile, level, namespace_, name_='Disk')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        if (
            self.Disk or
            self.anytypeobjs_ or
            super(DiskSection_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='DiskSection_Type'):
        # Write this object as nested Python constructor literals.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(DiskSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(DiskSection_Type, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('Disk=[\n')
        level += 1
        for Disk_ in self.Disk:
            showIndent(outfile, level)
            outfile.write('model_.VirtualDiskDesc_Type(\n')
            Disk_.exportLiteral(outfile, level, name_='VirtualDiskDesc_Type')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate this object from an ElementTree/lxml element node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(DiskSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Disk':
            obj_ = VirtualDiskDesc_Type.factory()
            obj_.build(child_)
            self.Disk.append(obj_)
        else:
            # Wildcard (xs:any) content; may still be handled by the
            # superclass below.
            obj_ = self.gds_build_any(child_, 'DiskSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
            super(DiskSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class DiskSection_Type
class VirtualDiskDesc_Type(GeneratedsSuper):
    """Type for a virtual disk descriptor.

    Attributes (from the OVF schema annotations):
      diskId                  -- identifier for the virtual disk
      fileRef                 -- reference to virtual disk content; if not
                                 specified a blank virtual disk is created
                                 of the size given by 'capacity'
      capacity                -- virtual disk capacity, either an xs:long
                                 size or a property reference of the form
                                 ${property_name}
      capacityAllocationUnits -- unit of allocation for 'capacity'
                                 (default 'byte'); value shall match a
                                 recognized value for the UNITS qualifier
                                 in DSP0004
      format                  -- format of the virtual disk, given as a URI
                                 identifying the disk type
      populatedSize           -- estimated populated size of the disk in
                                 bytes
      parentRef               -- reference to a potential parent disk"""
    subclass = None
    superclass = None
    def __init__(self, capacityAllocationUnits='byte', capacity=None, format=None, parentRef=None, fileRef=None, populatedSize=None, diskId=None, anytypeobjs_=None):
        self.capacityAllocationUnits = _cast(None, capacityAllocationUnits)
        self.capacity = _cast(None, capacity)
        self.format = _cast(None, format)
        self.parentRef = _cast(None, parentRef)
        self.fileRef = _cast(None, fileRef)
        self.populatedSize = _cast(int, populatedSize)
        self.diskId = _cast(None, diskId)
        # Use a fresh list per instance so instances never share state.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed,
        # otherwise this class itself.
        if VirtualDiskDesc_Type.subclass:
            return VirtualDiskDesc_Type.subclass(*args_, **kwargs_)
        else:
            return VirtualDiskDesc_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fix: this referenced the never-created attribute self._anytypeobjs_
    # (leading underscore) and therefore always raised AttributeError.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_capacityAllocationUnits(self): return self.capacityAllocationUnits
    def set_capacityAllocationUnits(self, capacityAllocationUnits): self.capacityAllocationUnits = capacityAllocationUnits
    def get_capacity(self): return self.capacity
    def set_capacity(self, capacity): self.capacity = capacity
    def get_format(self): return self.format
    def set_format(self, format): self.format = format
    def get_parentRef(self): return self.parentRef
    def set_parentRef(self, parentRef): self.parentRef = parentRef
    def get_fileRef(self): return self.fileRef
    def set_fileRef(self, fileRef): self.fileRef = fileRef
    def get_populatedSize(self): return self.populatedSize
    def set_populatedSize(self, populatedSize): self.populatedSize = populatedSize
    def get_diskId(self): return self.diskId
    def set_diskId(self, diskId): self.diskId = diskId
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='VirtualDiskDesc_Type', namespacedef_=''):
        # Serialize as XML; a self-closing tag is written when there is no
        # child content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualDiskDesc_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VirtualDiskDesc_Type'):
        # Wildcard (xs:anyAttribute) attributes first: re-qualify xsi:*
        # attributes, re-declare foreign namespaces with synthetic
        # xmlns:yyyN prefixes, and pass plain attributes through; then the
        # declared attributes.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.capacityAllocationUnits is not None and 'capacityAllocationUnits' not in already_processed:
            already_processed.append('capacityAllocationUnits')
            outfile.write(' capacityAllocationUnits=%s' % (self.gds_format_string(quote_attrib(self.capacityAllocationUnits).encode(ExternalEncoding), input_name='capacityAllocationUnits'), ))
        if self.capacity is not None and 'capacity' not in already_processed:
            already_processed.append('capacity')
            outfile.write(' capacity=%s' % (self.gds_format_string(quote_attrib(self.capacity).encode(ExternalEncoding), input_name='capacity'), ))
        if self.format is not None and 'format' not in already_processed:
            already_processed.append('format')
            outfile.write(' format=%s' % (self.gds_format_string(quote_attrib(self.format).encode(ExternalEncoding), input_name='format'), ))
        if self.parentRef is not None and 'parentRef' not in already_processed:
            already_processed.append('parentRef')
            outfile.write(' parentRef=%s' % (self.gds_format_string(quote_attrib(self.parentRef).encode(ExternalEncoding), input_name='parentRef'), ))
        if self.fileRef is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            outfile.write(' fileRef=%s' % (self.gds_format_string(quote_attrib(self.fileRef).encode(ExternalEncoding), input_name='fileRef'), ))
        if self.populatedSize is not None and 'populatedSize' not in already_processed:
            already_processed.append('populatedSize')
            outfile.write(' populatedSize="%s"' % self.gds_format_integer(self.populatedSize, input_name='populatedSize'))
        if self.diskId is not None and 'diskId' not in already_processed:
            already_processed.append('diskId')
            outfile.write(' diskId=%s' % (self.gds_format_string(quote_attrib(self.diskId).encode(ExternalEncoding), input_name='diskId'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='VirtualDiskDesc_Type', fromsubclass_=False):
        # Only wildcard children exist for this type.
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        if (
            self.anytypeobjs_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='VirtualDiskDesc_Type'):
        # Write this object as nested Python constructor literals.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.capacityAllocationUnits is not None and 'capacityAllocationUnits' not in already_processed:
            already_processed.append('capacityAllocationUnits')
            showIndent(outfile, level)
            outfile.write('capacityAllocationUnits = "%s",\n' % (self.capacityAllocationUnits,))
        if self.capacity is not None and 'capacity' not in already_processed:
            already_processed.append('capacity')
            showIndent(outfile, level)
            outfile.write('capacity = "%s",\n' % (self.capacity,))
        if self.format is not None and 'format' not in already_processed:
            already_processed.append('format')
            showIndent(outfile, level)
            outfile.write('format = "%s",\n' % (self.format,))
        if self.parentRef is not None and 'parentRef' not in already_processed:
            already_processed.append('parentRef')
            showIndent(outfile, level)
            outfile.write('parentRef = "%s",\n' % (self.parentRef,))
        if self.fileRef is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            showIndent(outfile, level)
            outfile.write('fileRef = "%s",\n' % (self.fileRef,))
        if self.populatedSize is not None and 'populatedSize' not in already_processed:
            already_processed.append('populatedSize')
            showIndent(outfile, level)
            outfile.write('populatedSize = %d,\n' % (self.populatedSize,))
        if self.diskId is not None and 'diskId' not in already_processed:
            already_processed.append('diskId')
            showIndent(outfile, level)
            outfile.write('diskId = "%s",\n' % (self.diskId,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate this object from an ElementTree/lxml element node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('capacityAllocationUnits', node)
        if value is not None and 'capacityAllocationUnits' not in already_processed:
            already_processed.append('capacityAllocationUnits')
            self.capacityAllocationUnits = value
        value = find_attr_value_('capacity', node)
        if value is not None and 'capacity' not in already_processed:
            already_processed.append('capacity')
            self.capacity = value
        value = find_attr_value_('format', node)
        if value is not None and 'format' not in already_processed:
            already_processed.append('format')
            self.format = value
        value = find_attr_value_('parentRef', node)
        if value is not None and 'parentRef' not in already_processed:
            already_processed.append('parentRef')
            self.parentRef = value
        value = find_attr_value_('fileRef', node)
        if value is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            self.fileRef = value
        value = find_attr_value_('populatedSize', node)
        if value is not None and 'populatedSize' not in already_processed:
            already_processed.append('populatedSize')
            try:
                self.populatedSize = int(value)
            # Modernized from the Python-2-only 'except ValueError, exp:'
            # to the 'as' form, valid on Python 2.6+ and Python 3.
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('diskId', node)
        if value is not None and 'diskId' not in already_processed:
            already_processed.append('diskId')
            self.diskId = value
        # Everything not consumed above lands in the xs:anyAttribute bucket.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # All children are wildcard (xs:any) content.
        obj_ = self.gds_build_any(child_, 'VirtualDiskDesc_Type')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
# end class VirtualDiskDesc_Type
class OperatingSystemSection_Type(Section_Type):
"""Specification of the operating system installed in the
guestIdentifier defined by the CIM_OperatingSystem.OsType
enumerationVersion defined by the CIM_OperatingSystem.Version
field"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, version=None, id=None, Description=None, anytypeobjs_=None):
super(OperatingSystemSection_Type, self).__init__(required, Info, )
self.version = _cast(None, version)
self.id = _cast(int, id)
self.Description = Description
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if OperatingSystemSection_Type.subclass:
return OperatingSystemSection_Type.subclass(*args_, **kwargs_)
else:
return OperatingSystemSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_version(self): return self.version
def set_version(self, version): self.version = version
def get_id(self): return self.id
def set_id(self, id): self.id = id
def export(self, outfile, level, namespace_='ovf:', name_='OperatingSystemSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='OperatingSystemSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='OperatingSystemSection_Type'):
super(OperatingSystemSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='OperatingSystemSection_Type')
if self.version is not None and 'version' not in already_processed:
already_processed.append('version')
outfile.write(' version=%s' % (self.gds_format_string(quote_attrib(self.version).encode(ExternalEncoding), input_name='version'), ))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
outfile.write(' id="%s"' % self.gds_format_integer(self.id, input_name='id'))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='OperatingSystemSection_Type', fromsubclass_=False):
super(OperatingSystemSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
if self.Description is not None:
self.Description.export(outfile, level, namespace_, name_='Description')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Description is not None or
self.anytypeobjs_ or
super(OperatingSystemSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='OperatingSystemSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.version is not None and 'version' not in already_processed:
already_processed.append('version')
showIndent(outfile, level)
outfile.write('version = "%s",\n' % (self.version,))
if self.id is not None and 'id' not in already_processed:
already_processed.append('id')
showIndent(outfile, level)
outfile.write('id = %d,\n' % (self.id,))
super(OperatingSystemSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(OperatingSystemSection_Type, self).exportLiteralChildren(outfile, level, name_)
if self.Description is not None:
showIndent(outfile, level)
outfile.write('Description=model_.Msg_Type(\n')
self.Description.exportLiteral(outfile, level, name_='Description')
showIndent(outfile, level)
outfile.write('),\n')
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('version', node)
if value is not None and 'version' not in already_processed:
already_processed.append('version')
self.version = value
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.append('id')
try:
self.id = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
super(OperatingSystemSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Description':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Description(obj_)
else:
obj_ = self.gds_build_any(child_, 'OperatingSystemSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(OperatingSystemSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class OperatingSystemSection_Type
class EulaSection_Type(Section_Type):
    """End-User License Agreement section: carries the License text plus
    any wildcard (xs:any) children."""
    subclass = None
    superclass = Section_Type
    def __init__(self, required=None, Info=None, License=None, anytypeobjs_=None):
        super(EulaSection_Type, self).__init__(required, Info, )
        self.License = License
        # Wildcard (xs:any) children are collected in anytypeobjs_.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if EulaSection_Type.subclass:
            return EulaSection_Type.subclass(*args_, **kwargs_)
        else:
            return EulaSection_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_License(self): return self.License
    def set_License(self, License): self.License = License
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: previously assigned into the non-existent self._anytypeobjs_,
    # raising AttributeError on every call.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def export(self, outfile, level, namespace_='ovf:', name_='EulaSection_Type', namespacedef_=''):
        """Write this section as an XML element to outfile at indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='EulaSection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='EulaSection_Type'):
        super(EulaSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='EulaSection_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='EulaSection_Type', fromsubclass_=False):
        super(EulaSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.License is not None:
            self.License.export(outfile, level, namespace_, name_='License', )
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # True when there is anything to serialize between the element tags.
        if (
            self.License is not None or
            self.anytypeobjs_ or
            super(EulaSection_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='EulaSection_Type'):
        """Write this section as Python constructor-literal source code."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(EulaSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(EulaSection_Type, self).exportLiteralChildren(outfile, level, name_)
        if self.License is not None:
            showIndent(outfile, level)
            outfile.write('License=model_.Msg_Type(\n')
            self.License.exportLiteral(outfile, level, name_='License')
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(EulaSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'License':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_License(obj_)
        else:
            # Anything other than License is wildcard content.
            obj_ = self.gds_build_any(child_, 'EulaSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
        super(EulaSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class EulaSection_Type
class VirtualHardwareSection_Type(Section_Type):
    """Specifies virtual hardware requirements for a virtual machine.

    The ``id`` attribute is the unique identifier of this
    VirtualHardwareSection within a VirtualSystem.
    """
    subclass = None
    superclass = Section_Type
    def __init__(self, required=None, Info=None, id='', transport=None, System=None, Item=None, anytypeobjs_=None):
        super(VirtualHardwareSection_Type, self).__init__(required, Info, )
        self.id = _cast(None, id)
        self.transport = _cast(None, transport)
        self.System = System
        if Item is None:
            self.Item = []
        else:
            self.Item = Item
        # Wildcard (xs:any) children are collected in anytypeobjs_.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if VirtualHardwareSection_Type.subclass:
            return VirtualHardwareSection_Type.subclass(*args_, **kwargs_)
        else:
            return VirtualHardwareSection_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_System(self): return self.System
    def set_System(self, System): self.System = System
    def get_Item(self): return self.Item
    def set_Item(self, Item): self.Item = Item
    def add_Item(self, value): self.Item.append(value)
    def insert_Item(self, index, value): self.Item[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: previously assigned into the non-existent self._anytypeobjs_,
    # raising AttributeError on every call.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def get_transport(self): return self.transport
    def set_transport(self, transport): self.transport = transport
    def export(self, outfile, level, namespace_='ovf:', name_='VirtualHardwareSection_Type', namespacedef_=''):
        """Write this section as an XML element to outfile at indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualHardwareSection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VirtualHardwareSection_Type'):
        super(VirtualHardwareSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='VirtualHardwareSection_Type')
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
        if self.transport is not None and 'transport' not in already_processed:
            already_processed.append('transport')
            outfile.write(' transport=%s' % (self.gds_format_string(quote_attrib(self.transport).encode(ExternalEncoding), input_name='transport'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='VirtualHardwareSection_Type', fromsubclass_=False):
        super(VirtualHardwareSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        if self.System is not None:
            self.System.export(outfile, level, namespace_, name_='System')
        for Item_ in self.Item:
            Item_.export(outfile, level, namespace_, name_='Item')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # True when there is anything to serialize between the element tags.
        if (
            self.System is not None or
            self.Item or
            self.anytypeobjs_ or
            super(VirtualHardwareSection_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='VirtualHardwareSection_Type'):
        """Write this section as Python constructor-literal source code."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            showIndent(outfile, level)
            outfile.write('id = "%s",\n' % (self.id,))
        if self.transport is not None and 'transport' not in already_processed:
            already_processed.append('transport')
            showIndent(outfile, level)
            outfile.write('transport = "%s",\n' % (self.transport,))
        super(VirtualHardwareSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(VirtualHardwareSection_Type, self).exportLiteralChildren(outfile, level, name_)
        if self.System is not None:
            showIndent(outfile, level)
            outfile.write('System=model_.VSSD_Type(\n')
            self.System.exportLiteral(outfile, level, name_='System')
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('Item=[\n')
        level += 1
        for Item_ in self.Item:
            showIndent(outfile, level)
            outfile.write('model_.RASD_Type(\n')
            Item_.exportLiteral(outfile, level, name_='RASD_Type')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.append('id')
            self.id = value
        value = find_attr_value_('transport', node)
        if value is not None and 'transport' not in already_processed:
            already_processed.append('transport')
            self.transport = value
        super(VirtualHardwareSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'System':
            obj_ = VSSD_Type.factory()
            obj_.build(child_)
            self.set_System(obj_)
        elif nodeName_ == 'Item':
            obj_ = RASD_Type.factory()
            obj_.build(child_)
            self.Item.append(obj_)
        else:
            # Anything other than System/Item is wildcard content.
            obj_ = self.gds_build_any(child_, 'VirtualHardwareSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
        super(VirtualHardwareSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class VirtualHardwareSection_Type
class ResourceAllocationSection_Type(Section_Type):
    """Resource constraints on a VirtualSystemCollection, expressed as a
    list of RASD Item children."""
    subclass = None
    superclass = Section_Type
    def __init__(self, required=None, Info=None, Item=None, anytypeobjs_=None):
        super(ResourceAllocationSection_Type, self).__init__(required, Info, )
        if Item is None:
            self.Item = []
        else:
            self.Item = Item
        # Wildcard (xs:any) children are collected in anytypeobjs_.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if ResourceAllocationSection_Type.subclass:
            return ResourceAllocationSection_Type.subclass(*args_, **kwargs_)
        else:
            return ResourceAllocationSection_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Item(self): return self.Item
    def set_Item(self, Item): self.Item = Item
    def add_Item(self, value): self.Item.append(value)
    def insert_Item(self, index, value): self.Item[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: previously assigned into the non-existent self._anytypeobjs_,
    # raising AttributeError on every call.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def export(self, outfile, level, namespace_='ovf:', name_='ResourceAllocationSection_Type', namespacedef_=''):
        """Write this section as an XML element to outfile at indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ResourceAllocationSection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ResourceAllocationSection_Type'):
        super(ResourceAllocationSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='ResourceAllocationSection_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='ResourceAllocationSection_Type', fromsubclass_=False):
        super(ResourceAllocationSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        for Item_ in self.Item:
            Item_.export(outfile, level, namespace_, name_='Item')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # True when there is anything to serialize between the element tags.
        if (
            self.Item or
            self.anytypeobjs_ or
            super(ResourceAllocationSection_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ResourceAllocationSection_Type'):
        """Write this section as Python constructor-literal source code."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(ResourceAllocationSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(ResourceAllocationSection_Type, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('Item=[\n')
        level += 1
        for Item_ in self.Item:
            showIndent(outfile, level)
            outfile.write('model_.RASD_Type(\n')
            Item_.exportLiteral(outfile, level, name_='RASD_Type')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(ResourceAllocationSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Item':
            obj_ = RASD_Type.factory()
            obj_.build(child_)
            self.Item.append(obj_)
        else:
            # Anything other than Item is wildcard content.
            obj_ = self.gds_build_any(child_, 'ResourceAllocationSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
        super(ResourceAllocationSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class ResourceAllocationSection_Type
class InstallSection_Type(Section_Type):
"""If present indicates that the virtual machine needs to be initially
booted to install and configure the softwareDelay in seconds to
wait for power off to complete after initial boot"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, initialBootStopDelay=0, anytypeobjs_=None):
super(InstallSection_Type, self).__init__(required, Info, )
self.initialBootStopDelay = _cast(int, initialBootStopDelay)
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if InstallSection_Type.subclass:
return InstallSection_Type.subclass(*args_, **kwargs_)
else:
return InstallSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_initialBootStopDelay(self): return self.initialBootStopDelay
def set_initialBootStopDelay(self, initialBootStopDelay): self.initialBootStopDelay = initialBootStopDelay
def export(self, outfile, level, namespace_='ovf:', name_='InstallSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='InstallSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='InstallSection_Type'):
super(InstallSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='InstallSection_Type')
if self.initialBootStopDelay is not None and 'initialBootStopDelay' not in already_processed:
already_processed.append('initialBootStopDelay')
outfile.write(' initialBootStopDelay="%s"' % self.gds_format_integer(self.initialBootStopDelay, input_name='initialBootStopDelay'))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='InstallSection_Type', fromsubclass_=False):
super(InstallSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.anytypeobjs_ or
super(InstallSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='InstallSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.initialBootStopDelay is not None and 'initialBootStopDelay' not in already_processed:
already_processed.append('initialBootStopDelay')
showIndent(outfile, level)
outfile.write('initialBootStopDelay = %d,\n' % (self.initialBootStopDelay,))
super(InstallSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
def exportLiteralChildren(self, outfile, level, name_):
super(InstallSection_Type, self).exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('anytypeobjs_=[\n')
level += 1
for anytypeobjs_ in self.anytypeobjs_:
anytypeobjs_.exportLiteral(outfile, level)
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('initialBootStopDelay', node)
if value is not None and 'initialBootStopDelay' not in already_processed:
already_processed.append('initialBootStopDelay')
try:
self.initialBootStopDelay = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
super(InstallSection_Type, self).buildAttributes(node, attrs, already_processed)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
obj_ = self.gds_build_any(child_, 'InstallSection_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
super(InstallSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class InstallSection_Type
class StartupSection_Type(Section_Type):
    """Specifies the order in which entities in a VirtualSystemCollection
    are powered on and shut down."""
    subclass = None
    superclass = Section_Type
    def __init__(self, required=None, Info=None, Item=None, anytypeobjs_=None):
        super(StartupSection_Type, self).__init__(required, Info, )
        if Item is None:
            self.Item = []
        else:
            self.Item = Item
        # Wildcard (xs:any) children are collected in anytypeobjs_.
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
    def factory(*args_, **kwargs_):
        # Delegate to a registered subclass when one has been installed.
        if StartupSection_Type.subclass:
            return StartupSection_Type.subclass(*args_, **kwargs_)
        else:
            return StartupSection_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Item(self): return self.Item
    def set_Item(self, Item): self.Item = Item
    def add_Item(self, value): self.Item.append(value)
    def insert_Item(self, index, value): self.Item[index] = value
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # Fixed: previously assigned into the non-existent self._anytypeobjs_,
    # raising AttributeError on every call.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_[index] = value
    def export(self, outfile, level, namespace_='ovf:', name_='StartupSection_Type', namespacedef_=''):
        """Write this section as an XML element to outfile at indent level."""
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='StartupSection_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='StartupSection_Type'):
        super(StartupSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='StartupSection_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='StartupSection_Type', fromsubclass_=False):
        super(StartupSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
        for Item_ in self.Item:
            Item_.export(outfile, level, namespace_, name_='Item')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        # True when there is anything to serialize between the element tags.
        if (
            self.Item or
            self.anytypeobjs_ or
            super(StartupSection_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='StartupSection_Type'):
        """Write this section as Python constructor-literal source code."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(StartupSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(StartupSection_Type, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('Item=[\n')
        level += 1
        for Item_ in self.Item:
            showIndent(outfile, level)
            outfile.write('model_.ItemType(\n')
            Item_.exportLiteral(outfile, level, name_='ItemType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this instance from an ElementTree node."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        super(StartupSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Item':
            obj_ = ItemType.factory()
            obj_.build(child_)
            self.Item.append(obj_)
        else:
            # Anything other than Item is wildcard content.
            obj_ = self.gds_build_any(child_, 'StartupSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
        super(StartupSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class StartupSection_Type
class DeploymentOptionSection_Type(Section_Type):
"""Enumeration of discrete deployment options"""
subclass = None
superclass = Section_Type
def __init__(self, required=None, Info=None, Configuration=None, anytypeobjs_=None):
super(DeploymentOptionSection_Type, self).__init__(required, Info, )
if Configuration is None:
self.Configuration = []
else:
self.Configuration = Configuration
if anytypeobjs_ is None:
self.anytypeobjs_ = []
else:
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if DeploymentOptionSection_Type.subclass:
return DeploymentOptionSection_Type.subclass(*args_, **kwargs_)
else:
return DeploymentOptionSection_Type(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Configuration(self): return self.Configuration
def set_Configuration(self, Configuration): self.Configuration = Configuration
def add_Configuration(self, value): self.Configuration.append(value)
def insert_Configuration(self, index, value): self.Configuration[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def export(self, outfile, level, namespace_='ovf:', name_='DeploymentOptionSection_Type', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='DeploymentOptionSection_Type')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='DeploymentOptionSection_Type'):
super(DeploymentOptionSection_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='DeploymentOptionSection_Type')
def exportChildren(self, outfile, level, namespace_='ovf:', name_='DeploymentOptionSection_Type', fromsubclass_=False):
super(DeploymentOptionSection_Type, self).exportChildren(outfile, level, namespace_, name_, True)
for Configuration_ in self.Configuration:
Configuration_.export(outfile, level, namespace_, name_='Configuration')
for obj_ in self.anytypeobjs_:
obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Configuration or
self.anytypeobjs_ or
super(DeploymentOptionSection_Type, self).hasContent_()
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='DeploymentOptionSection_Type'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
super(DeploymentOptionSection_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        # Constructor-literal rendering: inherited children first, then the
        # Configuration list, then the wildcard (xs:any) children.
        super(DeploymentOptionSection_Type, self).exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('Configuration=[\n')
        level += 1
        for Configuration_ in self.Configuration:
            showIndent(outfile, level)
            outfile.write('model_.ConfigurationType(\n')
            Configuration_.exportLiteral(outfile, level, name_='ConfigurationType')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate this object from an ElementTree node: attributes first,
        # then each child dispatched by its local (namespace-stripped) tag.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # All attributes are parsed by the base class.
        super(DeploymentOptionSection_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Known child: <Configuration>.  Any other child is kept as a
        # wildcard (xs:any) object when gds_build_any can construct one.
        if nodeName_ == 'Configuration':
            obj_ = ConfigurationType.factory()
            obj_.build(child_)
            self.Configuration.append(obj_)
        else:
            obj_ = self.gds_build_any(child_, 'DeploymentOptionSection_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
        # Give the base class a chance at every child as well.
        super(DeploymentOptionSection_Type, self).buildChildren(child_, node, nodeName_, True)
# end class DeploymentOptionSection_Type
class cimDateTime(GeneratedsSuper):
    """Generated binding for the CIM common ``cimDateTime`` complex type.

    Holds the five alternative date/time child elements (CIM_DateTime,
    Interval, Date, Time, Datetime) as plain strings, plus wildcard XML
    attributes in the ``anyAttributes_`` dict.
    """
    # Applications may install a subclass here; factory() will build it.
    subclass = None
    superclass = None
    def __init__(self, CIM_DateTime=None, Interval=None, Date=None, Time=None, Datetime=None):
        self.CIM_DateTime = CIM_DateTime
        self.Interval = Interval
        self.Date = Date
        self.Time = Time
        self.Datetime = Datetime
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Build the registered subclass when present, else this class.
        if cimDateTime.subclass:
            return cimDateTime.subclass(*args_, **kwargs_)
        else:
            return cimDateTime(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_CIM_DateTime(self): return self.CIM_DateTime
    def set_CIM_DateTime(self, CIM_DateTime): self.CIM_DateTime = CIM_DateTime
    def get_Interval(self): return self.Interval
    def set_Interval(self, Interval): self.Interval = Interval
    def get_Date(self): return self.Date
    def set_Date(self, Date): self.Date = Date
    def get_Time(self): return self.Time
    def set_Time(self, Time): self.Time = Time
    def get_Datetime(self): return self.Datetime
    def set_Datetime(self, Datetime): self.Datetime = Datetime
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='cimDateTime', namespacedef_=''):
        # Serialize as XML; self-closing tag when there is no content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimDateTime')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimDateTime'):
        # Emit wildcard attributes: xsi-namespace attributes get the
        # conventional ``xsi:`` prefix; other namespaced attributes get a
        # synthetic ``yyy<n>`` prefix declared inline; unqualified
        # attributes are written unchanged.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimDateTime', fromsubclass_=False):
        # Write each of the optional date/time children that is set.
        if self.CIM_DateTime is not None:
            showIndent(outfile, level)
            outfile.write('<%sCIM_DateTime>%s</%sCIM_DateTime>\n' % (namespace_, self.gds_format_string(quote_xml(self.CIM_DateTime).encode(ExternalEncoding), input_name='CIM_DateTime'), namespace_))
        if self.Interval is not None:
            showIndent(outfile, level)
            outfile.write('<%sInterval>%s</%sInterval>\n' % (namespace_, self.gds_format_string(quote_xml(self.Interval).encode(ExternalEncoding), input_name='Interval'), namespace_))
        if self.Date is not None:
            showIndent(outfile, level)
            outfile.write('<%sDate>%s</%sDate>\n' % (namespace_, self.gds_format_string(quote_xml(self.Date).encode(ExternalEncoding), input_name='Date'), namespace_))
        if self.Time is not None:
            showIndent(outfile, level)
            outfile.write('<%sTime>%s</%sTime>\n' % (namespace_, self.gds_format_string(quote_xml(self.Time).encode(ExternalEncoding), input_name='Time'), namespace_))
        if self.Datetime is not None:
            showIndent(outfile, level)
            outfile.write('<%sDatetime>%s</%sDatetime>\n' % (namespace_, self.gds_format_string(quote_xml(self.Datetime).encode(ExternalEncoding), input_name='Datetime'), namespace_))
    def hasContent_(self):
        # Content exists when any of the five alternative children is set.
        if (
            self.CIM_DateTime is not None or
            self.Interval is not None or
            self.Date is not None or
            self.Time is not None or
            self.Datetime is not None
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimDateTime'):
        # Write a Python-literal (constructor-style) rendering of the object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.CIM_DateTime is not None:
            showIndent(outfile, level)
            outfile.write('CIM_DateTime=%s,\n' % quote_python(self.CIM_DateTime).encode(ExternalEncoding))
        if self.Interval is not None:
            showIndent(outfile, level)
            outfile.write('Interval=%s,\n' % quote_python(self.Interval).encode(ExternalEncoding))
        if self.Date is not None:
            showIndent(outfile, level)
            outfile.write('Date=%s,\n' % quote_python(self.Date).encode(ExternalEncoding))
        if self.Time is not None:
            showIndent(outfile, level)
            outfile.write('Time=%s,\n' % quote_python(self.Time).encode(ExternalEncoding))
        if self.Datetime is not None:
            showIndent(outfile, level)
            outfile.write('Datetime=%s,\n' % quote_python(self.Datetime).encode(ExternalEncoding))
    def build(self, node):
        # Populate from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect every attribute not handled elsewhere into the wildcard dict.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch each known child tag to the matching string field;
        # unknown children are silently ignored.
        if nodeName_ == 'CIM_DateTime':
            CIM_DateTime_ = child_.text
            CIM_DateTime_ = self.gds_validate_string(CIM_DateTime_, node, 'CIM_DateTime')
            self.CIM_DateTime = CIM_DateTime_
        elif nodeName_ == 'Interval':
            Interval_ = child_.text
            Interval_ = self.gds_validate_string(Interval_, node, 'Interval')
            self.Interval = Interval_
        elif nodeName_ == 'Date':
            Date_ = child_.text
            Date_ = self.gds_validate_string(Date_, node, 'Date')
            self.Date = Date_
        elif nodeName_ == 'Time':
            Time_ = child_.text
            Time_ = self.gds_validate_string(Time_, node, 'Time')
            self.Time = Time_
        elif nodeName_ == 'Datetime':
            Datetime_ = child_.text
            Datetime_ = self.gds_validate_string(Datetime_, node, 'Datetime')
            self.Datetime = Datetime_
# end class cimDateTime
class cimUnsignedByte(GeneratedsSuper):
    """Generated binding for the CIM common ``cimUnsignedByte`` simple type.

    The element's text content is kept verbatim in ``valueOf_`` (no numeric
    validation is done here) and wildcard XML attributes are kept in the
    ``anyAttributes_`` dict.
    """
    # Applications may install a subclass here; factory() will build it.
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Build the registered subclass when present, else this class.
        if cimUnsignedByte.subclass:
            return cimUnsignedByte.subclass(*args_, **kwargs_)
        else:
            return cimUnsignedByte(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='cimUnsignedByte', namespacedef_=''):
        # Serialize as XML; self-closing tag when there is no text content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimUnsignedByte')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimUnsignedByte'):
        # Emit wildcard attributes; xsi-namespace attributes get the
        # ``xsi:`` prefix, other namespaced ones a synthetic ``yyy<n>``
        # prefix declared inline, unqualified ones are written unchanged.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimUnsignedByte', fromsubclass_=False):
        # Simple-content type: no element children.
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimUnsignedByte'):
        # Write a Python-literal (constructor-style) rendering of the object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, then text content.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect every attribute not handled elsewhere into the wildcard dict.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimUnsignedByte
class cimByte(GeneratedsSuper):
    """Generated binding for the CIM common ``cimByte`` simple type.

    The element's text content is kept verbatim in ``valueOf_`` (no numeric
    validation is done here) and wildcard XML attributes are kept in the
    ``anyAttributes_`` dict.
    """
    # Applications may install a subclass here; factory() will build it.
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Build the registered subclass when present, else this class.
        if cimByte.subclass:
            return cimByte.subclass(*args_, **kwargs_)
        else:
            return cimByte(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='cimByte', namespacedef_=''):
        # Serialize as XML; self-closing tag when there is no text content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimByte')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimByte'):
        # Emit wildcard attributes; xsi-namespace attributes get the
        # ``xsi:`` prefix, other namespaced ones a synthetic ``yyy<n>``
        # prefix declared inline, unqualified ones are written unchanged.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimByte', fromsubclass_=False):
        # Simple-content type: no element children.
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimByte'):
        # Write a Python-literal (constructor-style) rendering of the object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, then text content.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect every attribute not handled elsewhere into the wildcard dict.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimByte
class cimUnsignedShort(GeneratedsSuper):
    """Generated binding for the CIM common ``cimUnsignedShort`` simple type.

    The element's text content is kept verbatim in ``valueOf_`` (no numeric
    validation is done here) and wildcard XML attributes are kept in the
    ``anyAttributes_`` dict.
    """
    # Applications may install a subclass here; factory() will build it.
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Build the registered subclass when present, else this class.
        if cimUnsignedShort.subclass:
            return cimUnsignedShort.subclass(*args_, **kwargs_)
        else:
            return cimUnsignedShort(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='cimUnsignedShort', namespacedef_=''):
        # Serialize as XML; self-closing tag when there is no text content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimUnsignedShort')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimUnsignedShort'):
        # Emit wildcard attributes; xsi-namespace attributes get the
        # ``xsi:`` prefix, other namespaced ones a synthetic ``yyy<n>``
        # prefix declared inline, unqualified ones are written unchanged.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimUnsignedShort', fromsubclass_=False):
        # Simple-content type: no element children.
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimUnsignedShort'):
        # Write a Python-literal (constructor-style) rendering of the object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, then text content.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect every attribute not handled elsewhere into the wildcard dict.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimUnsignedShort
class cimShort(GeneratedsSuper):
    """Generated binding for the CIM common ``cimShort`` simple type.

    The element's text content is kept verbatim in ``valueOf_`` (no numeric
    validation is done here) and wildcard XML attributes are kept in the
    ``anyAttributes_`` dict.
    """
    # Applications may install a subclass here; factory() will build it.
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Build the registered subclass when present, else this class.
        if cimShort.subclass:
            return cimShort.subclass(*args_, **kwargs_)
        else:
            return cimShort(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='cimShort', namespacedef_=''):
        # Serialize as XML; self-closing tag when there is no text content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimShort')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimShort'):
        # Emit wildcard attributes; xsi-namespace attributes get the
        # ``xsi:`` prefix, other namespaced ones a synthetic ``yyy<n>``
        # prefix declared inline, unqualified ones are written unchanged.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimShort', fromsubclass_=False):
        # Simple-content type: no element children.
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimShort'):
        # Write a Python-literal (constructor-style) rendering of the object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, then text content.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect every attribute not handled elsewhere into the wildcard dict.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimShort
class cimUnsignedInt(GeneratedsSuper):
    """Generated binding for the CIM common ``cimUnsignedInt`` simple type.

    Text content is kept verbatim in ``valueOf_``; wildcard XML attributes
    live in ``anyAttributes_``.  Unlike the sibling cim* types this one is
    extensible: an ``xsi:type`` attribute is tracked via ``extensiontype_``
    so derived-type instances round-trip correctly.
    """
    # Applications may install a subclass here; factory() will build it.
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None, extensiontype_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Build the registered subclass when present, else this class.
        if cimUnsignedInt.subclass:
            return cimUnsignedInt.subclass(*args_, **kwargs_)
        else:
            return cimUnsignedInt(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='ovf:', name_='cimUnsignedInt', namespacedef_=''):
        # Serialize as XML; self-closing tag when there is no text content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimUnsignedInt')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimUnsignedInt'):
        # Emit wildcard attributes; xsi-namespace attributes get the
        # ``xsi:`` prefix, other namespaced ones a synthetic ``yyy<n>``
        # prefix declared inline, unqualified ones are written unchanged.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        # Re-emit the xsi:type marker for derived-type instances.
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimUnsignedInt', fromsubclass_=False):
        # Simple-content type: no element children.
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimUnsignedInt'):
        # Write a Python-literal (constructor-style) rendering of the object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, then text content.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect wildcard attributes, then capture xsi:type separately so
        # derived-type information survives a round trip.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimUnsignedInt
class cimInt(GeneratedsSuper):
    """Generated binding for the CIM common ``cimInt`` simple type.

    The element's text content is kept verbatim in ``valueOf_`` (no numeric
    validation is done here) and wildcard XML attributes are kept in the
    ``anyAttributes_`` dict.
    """
    # Applications may install a subclass here; factory() will build it.
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Build the registered subclass when present, else this class.
        if cimInt.subclass:
            return cimInt.subclass(*args_, **kwargs_)
        else:
            return cimInt(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='cimInt', namespacedef_=''):
        # Serialize as XML; self-closing tag when there is no text content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimInt')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimInt'):
        # Emit wildcard attributes; xsi-namespace attributes get the
        # ``xsi:`` prefix, other namespaced ones a synthetic ``yyy<n>``
        # prefix declared inline, unqualified ones are written unchanged.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimInt', fromsubclass_=False):
        # Simple-content type: no element children.
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimInt'):
        # Write a Python-literal (constructor-style) rendering of the object.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, then text content.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect every attribute not handled elsewhere into the wildcard dict.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimInt
class cimUnsignedLong(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, valueOf_=None):
self.valueOf_ = valueOf_
self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if cimUnsignedLong.subclass:
return cimUnsignedLong.subclass(*args_, **kwargs_)
else:
return cimUnsignedLong(*args_, **kwargs_)
factory = staticmethod(factory)
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='cimUnsignedLong', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimUnsignedLong')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimUnsignedLong'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
pass
    # Simple XML type: no child elements to write.
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimUnsignedLong', fromsubclass_=False):
        pass
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
    # Emit a Python-literal rendering of this object (the generateDS
    # "literal" export style).
    def exportLiteral(self, outfile, level, name_='cimUnsignedLong'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    # Write each wildcard attribute as a `name = "value",` literal line.
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    # Simple XML type: no child elements to render.
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    # Populate this object from an ElementTree element node: attributes
    # first, then text content, then each child element.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            # Strip the '{namespace}' portion from the child's tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Capture every XML attribute into the wildcard dict, skipping any
    # names the caller has already handled.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    # Simple XML type: child elements are ignored.
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimUnsignedLong
class cimLong(GeneratedsSuper):
    """Binding class for the CIM ``cimLong`` XML simple type.

    Holds the element's text content in ``valueOf_``, arbitrary XML
    attributes in ``anyAttributes_``, and an optional ``xsi:type``
    value in ``extensiontype_`` (emitted when a derived type is
    serialized through this base).  NOTE(review): this looks like
    generateDS.py output -- prefer regenerating from the schema over
    hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None, extensiontype_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
        self.extensiontype_ = extensiontype_
    # Instantiate the registered subclass when one has been installed,
    # otherwise this class itself.
    def factory(*args_, **kwargs_):
        if cimLong.subclass:
            return cimLong.subclass(*args_, **kwargs_)
        else:
            return cimLong(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    # Serialize this element as XML to `outfile` at `level` indentation.
    def export(self, outfile, level, namespace_='ovf:', name_='cimLong', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimLong')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    # Write wildcard attributes; foreign-namespace attributes are
    # re-qualified with synthetic xmlns:yyyN declarations, and an
    # xsi:type attribute is appended when extensiontype_ is set.
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimLong'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        # Emit xsi:type when this instance stands in for a derived type.
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
        pass
    # Simple type: no child elements to write.
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimLong', fromsubclass_=False):
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    # Emit a Python-literal rendering (generateDS "literal" export).
    def exportLiteral(self, outfile, level, name_='cimLong'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    # Populate this object from an ElementTree element node.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Collect all XML attributes into the wildcard dict; xsi:type is
    # additionally captured as the extension-type marker.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimLong
class cimString(GeneratedsSuper):
    """Binding class for the CIM ``cimString`` XML simple type.

    Text content in ``valueOf_``, wildcard XML attributes in
    ``anyAttributes_``, optional ``xsi:type`` in ``extensiontype_``.
    Also serves as the base class for ``cimChar16`` below.
    NOTE(review): looks like generateDS.py output -- prefer
    regenerating from the schema over hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None, extensiontype_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
        self.extensiontype_ = extensiontype_
    # Instantiate the registered subclass when one has been installed.
    def factory(*args_, **kwargs_):
        if cimString.subclass:
            return cimString.subclass(*args_, **kwargs_)
        else:
            return cimString(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    # Serialize this element as XML to `outfile`.
    def export(self, outfile, level, namespace_='ovf:', name_='cimString', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimString')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    # Write wildcard attributes, re-qualifying foreign namespaces with
    # synthetic xmlns:yyyN prefixes; append xsi:type if set.
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimString'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
        pass
    # Simple type: no child elements to write.
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimString', fromsubclass_=False):
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    # Emit a Python-literal rendering (generateDS "literal" export).
    def exportLiteral(self, outfile, level, name_='cimString'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    # Populate this object from an ElementTree element node.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Collect XML attributes; xsi:type is captured as extensiontype_.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimString
class cimBoolean(GeneratedsSuper):
    """Binding class for the CIM ``cimBoolean`` XML simple type.

    Text content in ``valueOf_``, wildcard XML attributes in
    ``anyAttributes_``, optional ``xsi:type`` in ``extensiontype_``.
    NOTE(review): looks like generateDS.py output -- prefer
    regenerating from the schema over hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None, extensiontype_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
        self.extensiontype_ = extensiontype_
    # Instantiate the registered subclass when one has been installed.
    def factory(*args_, **kwargs_):
        if cimBoolean.subclass:
            return cimBoolean.subclass(*args_, **kwargs_)
        else:
            return cimBoolean(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    # Serialize this element as XML to `outfile`.
    def export(self, outfile, level, namespace_='ovf:', name_='cimBoolean', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimBoolean')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    # Write wildcard attributes, re-qualifying foreign namespaces with
    # synthetic xmlns:yyyN prefixes; append xsi:type if set.
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimBoolean'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
        pass
    # Simple type: no child elements to write.
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimBoolean', fromsubclass_=False):
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    # Emit a Python-literal rendering (generateDS "literal" export).
    def exportLiteral(self, outfile, level, name_='cimBoolean'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    # Populate this object from an ElementTree element node.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Collect XML attributes; xsi:type is captured as extensiontype_.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimBoolean
class cimFloat(GeneratedsSuper):
    """Binding class for the CIM ``cimFloat`` XML simple type.

    Text content in ``valueOf_`` and wildcard XML attributes in
    ``anyAttributes_``; no extension-type support (unlike cimLong /
    cimString / cimBoolean above).  NOTE(review): looks like
    generateDS.py output -- prefer regenerating from the schema over
    hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    # Instantiate the registered subclass when one has been installed.
    def factory(*args_, **kwargs_):
        if cimFloat.subclass:
            return cimFloat.subclass(*args_, **kwargs_)
        else:
            return cimFloat(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    # Serialize this element as XML to `outfile`.
    def export(self, outfile, level, namespace_='ovf:', name_='cimFloat', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimFloat')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    # Write wildcard attributes, re-qualifying foreign namespaces with
    # synthetic xmlns:yyyN prefixes.
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimFloat'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    # Simple type: no child elements to write.
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimFloat', fromsubclass_=False):
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    # Emit a Python-literal rendering (generateDS "literal" export).
    def exportLiteral(self, outfile, level, name_='cimFloat'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    # Populate this object from an ElementTree element node.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Collect every XML attribute into the wildcard dict.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimFloat
class cimDouble(GeneratedsSuper):
    """Binding class for the CIM ``cimDouble`` XML simple type.

    Text content in ``valueOf_`` and wildcard XML attributes in
    ``anyAttributes_``; no extension-type support.  NOTE(review):
    looks like generateDS.py output -- prefer regenerating from the
    schema over hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    # Instantiate the registered subclass when one has been installed.
    def factory(*args_, **kwargs_):
        if cimDouble.subclass:
            return cimDouble.subclass(*args_, **kwargs_)
        else:
            return cimDouble(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    # Serialize this element as XML to `outfile`.
    def export(self, outfile, level, namespace_='ovf:', name_='cimDouble', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimDouble')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    # Write wildcard attributes, re-qualifying foreign namespaces with
    # synthetic xmlns:yyyN prefixes.
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimDouble'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    # Simple type: no child elements to write.
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimDouble', fromsubclass_=False):
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    # Emit a Python-literal rendering (generateDS "literal" export).
    def exportLiteral(self, outfile, level, name_='cimDouble'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    # Populate this object from an ElementTree element node.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Collect every XML attribute into the wildcard dict.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimDouble
class cimChar16(cimString):
    """Binding class for the CIM ``cimChar16`` XML type.

    Derived from ``cimString``; delegates attribute export, content
    detection, literal export and attribute building to the base
    class after handling its own wildcard attributes.  NOTE(review):
    looks like generateDS.py output -- prefer regenerating from the
    schema over hand-editing.
    """
    subclass = None
    superclass = cimString
    def __init__(self, valueOf_=None):
        super(cimChar16, self).__init__(valueOf_, )
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    # Instantiate the registered subclass when one has been installed.
    def factory(*args_, **kwargs_):
        if cimChar16.subclass:
            return cimChar16.subclass(*args_, **kwargs_)
        else:
            return cimChar16(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    # Serialize this element as XML to `outfile`.
    def export(self, outfile, level, namespace_='ovf:', name_='cimChar16', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimChar16')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    # Write own wildcard attributes, then let cimString add its
    # attributes (e.g. xsi:type), sharing `already_processed` to avoid
    # duplicates.
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimChar16'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        super(cimChar16, self).exportAttributes(outfile, level, already_processed, namespace_, name_='cimChar16')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimChar16', fromsubclass_=False):
        super(cimChar16, self).exportChildren(outfile, level, namespace_, name_, True)
        pass
    # Content check includes the base class's notion of content.
    def hasContent_(self):
        if (
            self.valueOf_ or
            super(cimChar16, self).hasContent_()
            ):
            return True
        else:
            return False
    # Emit a Python-literal rendering (generateDS "literal" export).
    def exportLiteral(self, outfile, level, name_='cimChar16'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
        super(cimChar16, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(cimChar16, self).exportLiteralChildren(outfile, level, name_)
        pass
    # Populate this object from an ElementTree element node.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Collect own attributes, then let cimString pick up xsi:type.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        super(cimChar16, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimChar16
class cimBase64Binary(GeneratedsSuper):
    """Binding class for the CIM ``cimBase64Binary`` XML simple type.

    Text content in ``valueOf_`` and wildcard XML attributes in
    ``anyAttributes_``; no extension-type support.  NOTE(review):
    looks like generateDS.py output -- prefer regenerating from the
    schema over hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    # Instantiate the registered subclass when one has been installed.
    def factory(*args_, **kwargs_):
        if cimBase64Binary.subclass:
            return cimBase64Binary.subclass(*args_, **kwargs_)
        else:
            return cimBase64Binary(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    # Serialize this element as XML to `outfile`.
    def export(self, outfile, level, namespace_='ovf:', name_='cimBase64Binary', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimBase64Binary')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    # Write wildcard attributes, re-qualifying foreign namespaces with
    # synthetic xmlns:yyyN prefixes.
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimBase64Binary'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    # Simple type: no child elements to write.
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimBase64Binary', fromsubclass_=False):
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    # Emit a Python-literal rendering (generateDS "literal" export).
    def exportLiteral(self, outfile, level, name_='cimBase64Binary'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    # Populate this object from an ElementTree element node.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Collect every XML attribute into the wildcard dict.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class cimBase64Binary
class cimReference(GeneratedsSuper):
    """Binding class for the CIM ``cimReference`` XML type.

    Unlike the simple-value cim* bindings around it, this element may
    contain arbitrary child elements, held in ``anytypeobjs_`` (a list
    of objects exposing ``export``/``exportLiteral``); wildcard XML
    attributes are kept in ``anyAttributes_``.
    """
    subclass = None
    superclass = None
    def __init__(self, anytypeobjs_=None):
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.anyAttributes_ = {}
    # Instantiate the registered subclass when one has been installed,
    # otherwise this class itself.
    def factory(*args_, **kwargs_):
        if cimReference.subclass:
            return cimReference.subclass(*args_, **kwargs_)
        else:
            return cimReference(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
    # BUG FIX: previously assigned into the non-existent attribute
    # ``self._anytypeobjs_`` (AttributeError on every call, since only
    # ``anytypeobjs_`` is ever created) and used index assignment; now
    # performs a real list insert at ``index`` as the name promises.
    def insert_anytypeobjs_(self, index, value): self.anytypeobjs_.insert(index, value)
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    # Serialize this element as XML to `outfile`; children are written
    # on their own indented lines.
    def export(self, outfile, level, namespace_='ovf:', name_='cimReference', namespacedef_=''):
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimReference')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    # Write wildcard attributes, re-qualifying foreign namespaces with
    # synthetic xmlns:yyyN prefixes.
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimReference'):
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    # Delegate serialization of each captured child object.
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimReference', fromsubclass_=False):
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
    def hasContent_(self):
        if (
            self.anytypeobjs_
            ):
            return True
        else:
            return False
    # Emit a Python-literal rendering (generateDS "literal" export).
    def exportLiteral(self, outfile, level, name_='cimReference'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    # Populate this object from an ElementTree element node.
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    # Collect every XML attribute into the wildcard dict.
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    # Each child element becomes a generic object in anytypeobjs_.
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        obj_ = self.gds_build_any(child_, 'cimReference')
        if obj_ is not None:
            self.add_anytypeobjs_(obj_)
# end class cimReference
class cimHexBinary(GeneratedsSuper):
    """Binding class for the CIM ``hexBinary`` simple type.

    The element's character data is kept in ``valueOf_``; XML attributes
    not claimed elsewhere are collected verbatim (Clark-notation keys) in
    the ``anyAttributes_`` dict.  NOTE(review): this appears to be
    generateDS-style generated code -- prefer regenerating from the schema
    over hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    # Factory hook: assigning cimHexBinary.subclass redirects construction
    # (e.g. from parser code) to a hand-written subclass.
    def factory(*args_, **kwargs_):
        if cimHexBinary.subclass:
            return cimHexBinary.subclass(*args_, **kwargs_)
        else:
            return cimHexBinary(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='cimHexBinary', namespacedef_=''):
        # Serialise as XML: open tag, attributes, then either the encoded
        # text content or a self-closing tag when there is no content.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimHexBinary')
        if self.hasContent_():
            outfile.write('>')
            # NOTE(review): writing encoded bytes to outfile implies a
            # Python 2 / byte-oriented stream -- confirm before porting.
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimHexBinary'):
        # Emit the wildcard attributes, rewriting Clark-notation names:
        # xsi-namespaced attributes get the conventional 'xsi:' prefix, the
        # XML namespace is emitted bare, and any other namespace gets a
        # synthetic xmlns:yyyN declaration.  `already_processed` (shared
        # with callers) suppresses duplicates -- its mutation order matters.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    # Rebinds `name` to the local part for the rest of this pass.
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimHexBinary', fromsubclass_=False):
        # Simple type: no child elements.
        pass
    def hasContent_(self):
        # Truthy text content is the only possible content.
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimHexBinary'):
        # Render a Python-literal representation (attributes, then valueOf_).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, text, then children.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect every attribute not already claimed into the wildcard map.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: child elements are ignored.
        pass
# end class cimHexBinary
class cimAnySimpleType(GeneratedsSuper):
    """Binding class for the CIM ``anySimpleType`` simple type.

    Structurally identical to the other cim* simple-type bindings: text
    content in ``valueOf_``, unclaimed XML attributes in ``anyAttributes_``.
    NOTE(review): generateDS-style generated code -- prefer regenerating
    from the schema over hand-editing.
    """
    subclass = None
    superclass = None
    def __init__(self, valueOf_=None):
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    # Factory hook: assigning cimAnySimpleType.subclass redirects
    # construction to a hand-written subclass.
    def factory(*args_, **kwargs_):
        if cimAnySimpleType.subclass:
            return cimAnySimpleType.subclass(*args_, **kwargs_)
        else:
            return cimAnySimpleType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='cimAnySimpleType', namespacedef_=''):
        # Serialise as XML: open tag, attributes, then either the encoded
        # text content or a self-closing tag when empty.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='cimAnySimpleType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='cimAnySimpleType'):
        # Emit wildcard attributes, rewriting Clark-notation names; see the
        # identical logic in the sibling cim* classes.  `already_processed`
        # is shared with callers -- its mutation order matters.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    # Rebinds `name` to the local part for the rest of this pass.
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='cimAnySimpleType', fromsubclass_=False):
        # Simple type: no child elements.
        pass
    def hasContent_(self):
        # Truthy text content is the only possible content.
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='cimAnySimpleType'):
        # Render a Python-literal representation (attributes, then valueOf_).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, text, then children.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Simple type: child elements are ignored.
        pass
# end class cimAnySimpleType
class qualifierString(cimString):
    """Binding for a CIM string value that carries a ``qualifier`` XML
    attribute.

    Extends cimString with the ``qualifier`` attribute and an
    ``extensiontype_`` hook used to emit ``xsi:type`` when this class is
    serialised as the base of a derived type.  NOTE(review):
    generateDS-style generated code.
    """
    subclass = None
    superclass = cimString
    def __init__(self, qualifier=None, valueOf_=None, extensiontype_=None):
        super(qualifierString, self).__init__(valueOf_, extensiontype_, )
        self.qualifier = _cast(None, qualifier)
        self.valueOf_ = valueOf_
        self.extensiontype_ = extensiontype_
    # Factory hook: assigning qualifierString.subclass redirects
    # construction to a hand-written subclass.
    def factory(*args_, **kwargs_):
        if qualifierString.subclass:
            return qualifierString.subclass(*args_, **kwargs_)
        else:
            return qualifierString(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_qualifier(self): return self.qualifier
    def set_qualifier(self, qualifier): self.qualifier = qualifier
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='ovf:', name_='qualifierString', namespacedef_=''):
        # Serialise as XML: open tag, attributes, then either the encoded
        # text content or a self-closing tag when empty.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierString')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierString'):
        # Superclass attributes first; the shared `already_processed` list
        # then guards 'qualifier' and 'xsi:type' against duplicate emission.
        super(qualifierString, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierString')
        if self.qualifier is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            outfile.write(' qualifier=%s' % (self.gds_format_string(quote_attrib(self.qualifier).encode(ExternalEncoding), input_name='qualifier'), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierString', fromsubclass_=False):
        # No child elements of our own; delegate to the superclass.
        super(qualifierString, self).exportChildren(outfile, level, namespace_, name_, True)
        pass
    def hasContent_(self):
        if (
            self.valueOf_ or
            super(qualifierString, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='qualifierString'):
        # Render a Python-literal representation (attributes, children, valueOf_).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.qualifier is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            showIndent(outfile, level)
            outfile.write('qualifier = "%s",\n' % (self.qualifier,))
        super(qualifierString, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(qualifierString, self).exportLiteralChildren(outfile, level, name_)
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, text, then children.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Claim 'qualifier' and 'xsi:type' before the superclass sweeps the
        # remaining attributes.
        value = find_attr_value_('qualifier', node)
        if value is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            self.qualifier = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
        super(qualifierString, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class qualifierString
class qualifierBoolean(cimBoolean):
    """Binding for a CIM boolean value carrying a ``qualifier`` XML
    attribute; the element text lives in ``valueOf_`` (via cimBoolean).
    """
    subclass = None
    superclass = cimBoolean
    def __init__(self, qualifier=None, valueOf_=None):
        super(qualifierBoolean, self).__init__(valueOf_, )
        self.qualifier = _cast(None, qualifier)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one is set.
        target = qualifierBoolean.subclass or qualifierBoolean
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_qualifier(self): return self.qualifier
    def set_qualifier(self, qualifier): self.qualifier = qualifier
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def export(self, outfile, level, namespace_='ovf:', name_='qualifierBoolean', namespacedef_=''):
        # Open tag, attributes, then either text content or a self-closing tag.
        showIndent(outfile, level)
        extra = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, extra, ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierBoolean')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_, name_)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierBoolean'):
        # Inherited attributes first, then our own 'qualifier' (emitted once).
        super(qualifierBoolean, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierBoolean')
        if self.qualifier is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            encoded_qualifier = quote_attrib(self.qualifier).encode(ExternalEncoding)
            outfile.write(' qualifier=%s' % (self.gds_format_string(encoded_qualifier, input_name='qualifier'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierBoolean', fromsubclass_=False):
        # No child elements of our own; delegate to the superclass.
        super(qualifierBoolean, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        return bool(self.valueOf_ or super(qualifierBoolean, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='qualifierBoolean'):
        # Python-literal rendering: attributes, optional children, then valueOf_.
        inner = level + 1
        self.exportLiteralAttributes(outfile, inner, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, inner, name_)
        showIndent(outfile, inner)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # Emit 'qualifier' once, then let the superclass add the rest.
        if self.qualifier is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            showIndent(outfile, level)
            outfile.write('qualifier = "%s",\n' % (self.qualifier,))
        super(qualifierBoolean, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(qualifierBoolean, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        # Populate from an ElementTree node: attributes, text, then children.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        # Claim 'qualifier' before the superclass sweeps the remainder.
        attr_value = find_attr_value_('qualifier', node)
        if attr_value is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            self.qualifier = attr_value
        super(qualifierBoolean, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class qualifierBoolean
class qualifierUInt32(cimUnsignedInt):
    """Binding for a CIM unsigned-int value carrying a ``qualifier`` XML
    attribute; the element text lives in ``valueOf_`` (via cimUnsignedInt).
    """
    subclass = None
    superclass = cimUnsignedInt
    def __init__(self, qualifier=None, valueOf_=None):
        super(qualifierUInt32, self).__init__(valueOf_, )
        self.qualifier = _cast(None, qualifier)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one is set.
        target = qualifierUInt32.subclass or qualifierUInt32
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_qualifier(self): return self.qualifier
    def set_qualifier(self, qualifier): self.qualifier = qualifier
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def export(self, outfile, level, namespace_='ovf:', name_='qualifierUInt32', namespacedef_=''):
        # Open tag, attributes, then either text content or a self-closing tag.
        showIndent(outfile, level)
        extra = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, extra, ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierUInt32')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_, name_)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierUInt32'):
        # Inherited attributes first, then our own 'qualifier' (emitted once).
        super(qualifierUInt32, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierUInt32')
        if self.qualifier is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            encoded_qualifier = quote_attrib(self.qualifier).encode(ExternalEncoding)
            outfile.write(' qualifier=%s' % (self.gds_format_string(encoded_qualifier, input_name='qualifier'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierUInt32', fromsubclass_=False):
        # No child elements of our own; delegate to the superclass.
        super(qualifierUInt32, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        return bool(self.valueOf_ or super(qualifierUInt32, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='qualifierUInt32'):
        # Python-literal rendering: attributes, optional children, then valueOf_.
        inner = level + 1
        self.exportLiteralAttributes(outfile, inner, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, inner, name_)
        showIndent(outfile, inner)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # Emit 'qualifier' once, then let the superclass add the rest.
        if self.qualifier is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            showIndent(outfile, level)
            outfile.write('qualifier = "%s",\n' % (self.qualifier,))
        super(qualifierUInt32, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(qualifierUInt32, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        # Populate from an ElementTree node: attributes, text, then children.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        # Claim 'qualifier' before the superclass sweeps the remainder.
        attr_value = find_attr_value_('qualifier', node)
        if attr_value is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            self.qualifier = attr_value
        super(qualifierUInt32, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class qualifierUInt32
class qualifierSInt64(cimLong):
    """Binding for a CIM signed 64-bit value carrying a ``qualifier`` XML
    attribute; the element text lives in ``valueOf_`` (via cimLong).
    """
    subclass = None
    superclass = cimLong
    def __init__(self, qualifier=None, valueOf_=None):
        super(qualifierSInt64, self).__init__(valueOf_, )
        self.qualifier = _cast(None, qualifier)
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one is set.
        target = qualifierSInt64.subclass or qualifierSInt64
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_qualifier(self): return self.qualifier
    def set_qualifier(self, qualifier): self.qualifier = qualifier
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def export(self, outfile, level, namespace_='ovf:', name_='qualifierSInt64', namespacedef_=''):
        # Open tag, attributes, then either text content or a self-closing tag.
        showIndent(outfile, level)
        extra = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, extra, ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierSInt64')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>')
        outfile.write(str(self.valueOf_).encode(ExternalEncoding))
        self.exportChildren(outfile, level + 1, namespace_, name_)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierSInt64'):
        # Inherited attributes first, then our own 'qualifier' (emitted once).
        super(qualifierSInt64, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierSInt64')
        if self.qualifier is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            encoded_qualifier = quote_attrib(self.qualifier).encode(ExternalEncoding)
            outfile.write(' qualifier=%s' % (self.gds_format_string(encoded_qualifier, input_name='qualifier'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierSInt64', fromsubclass_=False):
        # No child elements of our own; delegate to the superclass.
        super(qualifierSInt64, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        return bool(self.valueOf_ or super(qualifierSInt64, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='qualifierSInt64'):
        # Python-literal rendering: attributes, optional children, then valueOf_.
        inner = level + 1
        self.exportLiteralAttributes(outfile, inner, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, inner, name_)
        showIndent(outfile, inner)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        # Emit 'qualifier' once, then let the superclass add the rest.
        if self.qualifier is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            showIndent(outfile, level)
            outfile.write('qualifier = "%s",\n' % (self.qualifier,))
        super(qualifierSInt64, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(qualifierSInt64, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        # Populate from an ElementTree node: attributes, text, then children.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        # Claim 'qualifier' before the superclass sweeps the remainder.
        attr_value = find_attr_value_('qualifier', node)
        if attr_value is not None and 'qualifier' not in already_processed:
            already_processed.append('qualifier')
            self.qualifier = attr_value
        super(qualifierSInt64, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class qualifierSInt64
class qualifierSArray(qualifierString):
    """Binding for a string-array qualifier.

    Everything of substance is inherited from qualifierString; this class
    only fixes the element name and delegates every operation upward.
    """
    subclass = None
    superclass = qualifierString
    def __init__(self, qualifier=None):
        super(qualifierSArray, self).__init__(qualifier, )
    def factory(*args_, **kwargs_):
        # Route construction through a registered subclass when one is set.
        target = qualifierSArray.subclass or qualifierSArray
        return target(*args_, **kwargs_)
    factory = staticmethod(factory)
    def export(self, outfile, level, namespace_='ovf:', name_='qualifierSArray', namespacedef_=''):
        # Open tag, attributes, then either children or a self-closing tag.
        showIndent(outfile, level)
        extra = namespacedef_ and ' ' + namespacedef_ or ''
        outfile.write('<%s%s%s' % (namespace_, name_, extra, ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierSArray')
        if not self.hasContent_():
            outfile.write('/>\n')
            return
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, namespace_, name_)
        outfile.write('</%s%s>\n' % (namespace_, name_))
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='qualifierSArray'):
        super(qualifierSArray, self).exportAttributes(outfile, level, already_processed, namespace_, name_='qualifierSArray')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='qualifierSArray', fromsubclass_=False):
        super(qualifierSArray, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        return bool(super(qualifierSArray, self).hasContent_())
    def exportLiteral(self, outfile, level, name_='qualifierSArray'):
        # Python-literal rendering, one indent level deeper.
        inner = level + 1
        self.exportLiteralAttributes(outfile, inner, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, inner, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        super(qualifierSArray, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(qualifierSArray, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        # Populate from an ElementTree node; note: no text capture here.
        self.buildAttributes(node, node.attrib, [])
        for child_node in node:
            tag = Tag_pattern_.match(child_node.tag).groups()[-1]
            self.buildChildren(child_node, node, tag)
    def buildAttributes(self, node, attrs, already_processed):
        super(qualifierSArray, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(qualifierSArray, self).buildChildren(child_, node, nodeName_, True)
# end class qualifierSArray
class Caption(cimString):
    """Binding for a Caption element: a cimString that additionally
    collects arbitrary (xs:anyAttribute) XML attributes into
    ``anyAttributes_``.  NOTE(review): generateDS-style generated code.
    """
    subclass = None
    superclass = cimString
    def __init__(self, valueOf_=None):
        super(Caption, self).__init__(valueOf_, )
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    # Factory hook: assigning Caption.subclass redirects construction to a
    # hand-written subclass.
    def factory(*args_, **kwargs_):
        if Caption.subclass:
            return Caption.subclass(*args_, **kwargs_)
        else:
            return Caption(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='Caption', namespacedef_=''):
        # Serialise as XML: open tag, attributes, then either the encoded
        # text content or a self-closing tag when empty.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='Caption')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='Caption'):
        # Emit wildcard attributes first (rewriting Clark-notation names as
        # in the cim* simple types), then the inherited attributes; the
        # shared `already_processed` list prevents duplicates across both.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    # Rebinds `name` to the local part for the rest of this pass.
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        super(Caption, self).exportAttributes(outfile, level, already_processed, namespace_, name_='Caption')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='Caption', fromsubclass_=False):
        # No child elements of our own; delegate to the superclass.
        super(Caption, self).exportChildren(outfile, level, namespace_, name_, True)
        pass
    def hasContent_(self):
        if (
            self.valueOf_ or
            super(Caption, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='Caption'):
        # Render a Python-literal representation (attributes, children, valueOf_).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
        super(Caption, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(Caption, self).exportLiteralChildren(outfile, level, name_)
        pass
    def build(self, node):
        # Populate from an ElementTree node: attributes, text, then children.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Collect unclaimed attributes, then let the superclass claim its own.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        super(Caption, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class Caption
class CIM_VirtualSystemSettingData_Type(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, AutomaticRecoveryAction=None, AutomaticShutdownAction=None, AutomaticStartupAction=None, AutomaticStartupActionDelay=None, AutomaticStartupActionSequenceNumber=None, Caption=None, ConfigurationDataRoot=None, ConfigurationFile=None, ConfigurationID=None, CreationTime=None, Description=None, ElementName=None, InstanceID=None, LogDataRoot=None, Notes=None, RecoveryFile=None, SnapshotDataRoot=None, SuspendDataRoot=None, SwapFileDataRoot=None, VirtualSystemIdentifier=None, VirtualSystemType=None, anytypeobjs_=None, extensiontype_=None):
        """Initialise the CIM_VirtualSystemSettingData binding.

        Every parameter maps 1:1 onto the same-named child element of the
        VirtualSystemSettingData element.  ``Notes`` and ``anytypeobjs_``
        are repeatable children and are therefore stored as lists;
        ``anyAttributes_`` collects wildcard XML attributes and
        ``extensiontype_`` carries an optional xsi:type value.
        """
        self.AutomaticRecoveryAction = AutomaticRecoveryAction
        self.AutomaticShutdownAction = AutomaticShutdownAction
        self.AutomaticStartupAction = AutomaticStartupAction
        self.AutomaticStartupActionDelay = AutomaticStartupActionDelay
        self.AutomaticStartupActionSequenceNumber = AutomaticStartupActionSequenceNumber
        self.Caption = Caption
        self.ConfigurationDataRoot = ConfigurationDataRoot
        self.ConfigurationFile = ConfigurationFile
        self.ConfigurationID = ConfigurationID
        self.CreationTime = CreationTime
        self.Description = Description
        self.ElementName = ElementName
        self.InstanceID = InstanceID
        self.LogDataRoot = LogDataRoot
        # Materialise a fresh list per instance (avoids the shared mutable
        # default-argument pitfall).
        if Notes is None:
            self.Notes = []
        else:
            self.Notes = Notes
        self.RecoveryFile = RecoveryFile
        self.SnapshotDataRoot = SnapshotDataRoot
        self.SuspendDataRoot = SuspendDataRoot
        self.SwapFileDataRoot = SwapFileDataRoot
        self.VirtualSystemIdentifier = VirtualSystemIdentifier
        self.VirtualSystemType = VirtualSystemType
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.anyAttributes_ = {}
        self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if CIM_VirtualSystemSettingData_Type.subclass:
return CIM_VirtualSystemSettingData_Type.subclass(*args_, **kwargs_)
else:
return CIM_VirtualSystemSettingData_Type(*args_, **kwargs_)
factory = staticmethod(factory)
    # Generated getter/setter pairs (plus stub validators) for each child
    # element.  The validate_* hooks are intentionally empty: the schema
    # restrictions they correspond to are not enforced here.
    def get_AutomaticRecoveryAction(self): return self.AutomaticRecoveryAction
    def set_AutomaticRecoveryAction(self, AutomaticRecoveryAction): self.AutomaticRecoveryAction = AutomaticRecoveryAction
    def validate_AutomaticRecoveryAction(self, value):
        # Validate type AutomaticRecoveryAction, a restriction on xs:unsignedShort.
        pass
    def get_AutomaticShutdownAction(self): return self.AutomaticShutdownAction
    def set_AutomaticShutdownAction(self, AutomaticShutdownAction): self.AutomaticShutdownAction = AutomaticShutdownAction
    def validate_AutomaticShutdownAction(self, value):
        # Validate type AutomaticShutdownAction, a restriction on xs:unsignedShort.
        pass
    def get_AutomaticStartupAction(self): return self.AutomaticStartupAction
    def set_AutomaticStartupAction(self, AutomaticStartupAction): self.AutomaticStartupAction = AutomaticStartupAction
    def validate_AutomaticStartupAction(self, value):
        # Validate type AutomaticStartupAction, a restriction on xs:unsignedShort.
        pass
    def get_AutomaticStartupActionDelay(self): return self.AutomaticStartupActionDelay
    def set_AutomaticStartupActionDelay(self, AutomaticStartupActionDelay): self.AutomaticStartupActionDelay = AutomaticStartupActionDelay
    def get_AutomaticStartupActionSequenceNumber(self): return self.AutomaticStartupActionSequenceNumber
    def set_AutomaticStartupActionSequenceNumber(self, AutomaticStartupActionSequenceNumber): self.AutomaticStartupActionSequenceNumber = AutomaticStartupActionSequenceNumber
    def get_Caption(self): return self.Caption
    def set_Caption(self, Caption): self.Caption = Caption
    def get_ConfigurationDataRoot(self): return self.ConfigurationDataRoot
    def set_ConfigurationDataRoot(self, ConfigurationDataRoot): self.ConfigurationDataRoot = ConfigurationDataRoot
    def get_ConfigurationFile(self): return self.ConfigurationFile
    def set_ConfigurationFile(self, ConfigurationFile): self.ConfigurationFile = ConfigurationFile
    def get_ConfigurationID(self): return self.ConfigurationID
    def set_ConfigurationID(self, ConfigurationID): self.ConfigurationID = ConfigurationID
    def get_CreationTime(self): return self.CreationTime
    def set_CreationTime(self, CreationTime): self.CreationTime = CreationTime
    def get_Description(self): return self.Description
    def set_Description(self, Description): self.Description = Description
    def get_ElementName(self): return self.ElementName
    def set_ElementName(self, ElementName): self.ElementName = ElementName
    def get_InstanceID(self): return self.InstanceID
    def set_InstanceID(self, InstanceID): self.InstanceID = InstanceID
    def get_LogDataRoot(self): return self.LogDataRoot
    def set_LogDataRoot(self, LogDataRoot): self.LogDataRoot = LogDataRoot
    def get_Notes(self): return self.Notes
    def set_Notes(self, Notes): self.Notes = Notes
    def add_Notes(self, value): self.Notes.append(value)
    # Note: despite the name, insert_Notes replaces the item at `index`.
    def insert_Notes(self, index, value): self.Notes[index] = value
    def get_RecoveryFile(self): return self.RecoveryFile
    def set_RecoveryFile(self, RecoveryFile): self.RecoveryFile = RecoveryFile
    def get_SnapshotDataRoot(self): return self.SnapshotDataRoot
    def set_SnapshotDataRoot(self, SnapshotDataRoot): self.SnapshotDataRoot = SnapshotDataRoot
    def get_SuspendDataRoot(self): return self.SuspendDataRoot
    def set_SuspendDataRoot(self, SuspendDataRoot): self.SuspendDataRoot = SuspendDataRoot
    def get_SwapFileDataRoot(self): return self.SwapFileDataRoot
    def set_SwapFileDataRoot(self, SwapFileDataRoot): self.SwapFileDataRoot = SwapFileDataRoot
    def get_VirtualSystemIdentifier(self): return self.VirtualSystemIdentifier
    def set_VirtualSystemIdentifier(self, VirtualSystemIdentifier): self.VirtualSystemIdentifier = VirtualSystemIdentifier
    def get_VirtualSystemType(self): return self.VirtualSystemType
    def set_VirtualSystemType(self, VirtualSystemType): self.VirtualSystemType = VirtualSystemType
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='ovf:', name_='CIM_VirtualSystemSettingData_Type', namespacedef_=''):
        """Write this object to ``outfile`` as an XML element.

        ``level`` is the indentation depth, ``namespace_`` the tag prefix,
        ``name_`` the element name, and ``namespacedef_`` an optional
        string of xmlns declarations appended to the start tag.  Emits a
        self-closing tag when hasContent_() reports no populated children.
        """
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='CIM_VirtualSystemSettingData_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='CIM_VirtualSystemSettingData_Type'):
        """Write the wildcard XML attributes of this element.

        Attribute names may arrive in ElementTree's ``{uri}local`` form:
        xsi-namespaced names are rewritten as ``xsi:local``; names in other
        namespaces get a synthetic ``xmlns:yyyN`` declaration (except the
        built-in XML namespace, written verbatim); plain names are written
        as-is.  ``already_processed`` suppresses duplicates across calls.
        Finally appends an ``xsi:type`` attribute when ``extensiontype_``
        is set.
        """
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # '{xsi-uri}local' -> 'xsi:local'
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Foreign namespace: declare a throwaway prefix.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='CIM_VirtualSystemSettingData_Type', fromsubclass_=False):
        """Write the child elements in schema order.

        Integer-valued children are formatted inline via
        gds_format_integer; object-valued children delegate to their own
        export(); list children (Notes, anytypeobjs_) emit one element per
        item.
        """
        if self.AutomaticRecoveryAction is not None:
            showIndent(outfile, level)
            outfile.write('<%sAutomaticRecoveryAction>%s</%sAutomaticRecoveryAction>\n' % (namespace_, self.gds_format_integer(self.AutomaticRecoveryAction, input_name='AutomaticRecoveryAction'), namespace_))
        if self.AutomaticShutdownAction is not None:
            showIndent(outfile, level)
            outfile.write('<%sAutomaticShutdownAction>%s</%sAutomaticShutdownAction>\n' % (namespace_, self.gds_format_integer(self.AutomaticShutdownAction, input_name='AutomaticShutdownAction'), namespace_))
        if self.AutomaticStartupAction is not None:
            showIndent(outfile, level)
            outfile.write('<%sAutomaticStartupAction>%s</%sAutomaticStartupAction>\n' % (namespace_, self.gds_format_integer(self.AutomaticStartupAction, input_name='AutomaticStartupAction'), namespace_))
        if self.AutomaticStartupActionDelay is not None:
            self.AutomaticStartupActionDelay.export(outfile, level, namespace_, name_='AutomaticStartupActionDelay')
        if self.AutomaticStartupActionSequenceNumber is not None:
            self.AutomaticStartupActionSequenceNumber.export(outfile, level, namespace_, name_='AutomaticStartupActionSequenceNumber')
        if self.Caption is not None:
            self.Caption.export(outfile, level, namespace_, name_='Caption')
        if self.ConfigurationDataRoot is not None:
            self.ConfigurationDataRoot.export(outfile, level, namespace_, name_='ConfigurationDataRoot')
        if self.ConfigurationFile is not None:
            self.ConfigurationFile.export(outfile, level, namespace_, name_='ConfigurationFile')
        if self.ConfigurationID is not None:
            self.ConfigurationID.export(outfile, level, namespace_, name_='ConfigurationID')
        if self.CreationTime is not None:
            self.CreationTime.export(outfile, level, namespace_, name_='CreationTime')
        if self.Description is not None:
            self.Description.export(outfile, level, namespace_, name_='Description')
        if self.ElementName is not None:
            self.ElementName.export(outfile, level, namespace_, name_='ElementName', )
        if self.InstanceID is not None:
            self.InstanceID.export(outfile, level, namespace_, name_='InstanceID', )
        if self.LogDataRoot is not None:
            self.LogDataRoot.export(outfile, level, namespace_, name_='LogDataRoot')
        for Notes_ in self.Notes:
            Notes_.export(outfile, level, namespace_, name_='Notes')
        if self.RecoveryFile is not None:
            self.RecoveryFile.export(outfile, level, namespace_, name_='RecoveryFile')
        if self.SnapshotDataRoot is not None:
            self.SnapshotDataRoot.export(outfile, level, namespace_, name_='SnapshotDataRoot')
        if self.SuspendDataRoot is not None:
            self.SuspendDataRoot.export(outfile, level, namespace_, name_='SuspendDataRoot')
        if self.SwapFileDataRoot is not None:
            self.SwapFileDataRoot.export(outfile, level, namespace_, name_='SwapFileDataRoot')
        if self.VirtualSystemIdentifier is not None:
            self.VirtualSystemIdentifier.export(outfile, level, namespace_, name_='VirtualSystemIdentifier')
        if self.VirtualSystemType is not None:
            self.VirtualSystemType.export(outfile, level, namespace_, name_='VirtualSystemType')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.AutomaticRecoveryAction is not None or
self.AutomaticShutdownAction is not None or
self.AutomaticStartupAction is not None or
self.AutomaticStartupActionDelay is not None or
self.AutomaticStartupActionSequenceNumber is not None or
self.Caption is not None or
self.ConfigurationDataRoot is not None or
self.ConfigurationFile is not None or
self.ConfigurationID is not None or
self.CreationTime is not None or
self.Description is not None or
self.ElementName is not None or
self.InstanceID is not None or
self.LogDataRoot is not None or
self.Notes or
self.RecoveryFile is not None or
self.SnapshotDataRoot is not None or
self.SuspendDataRoot is not None or
self.SwapFileDataRoot is not None or
self.VirtualSystemIdentifier is not None or
self.VirtualSystemType is not None or
self.anytypeobjs_
):
return True
else:
return False
    def exportLiteral(self, outfile, level, name_='CIM_VirtualSystemSettingData_Type'):
        """Write this object as Python constructor-literal text."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write each wildcard attribute as a ``name = "value",`` line."""
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write the children as keyword-argument literal lines.

        Integer children are written inline; object children are wrapped
        as ``Name=model_.Name(...)`` with their own exportLiteral() output
        nested inside; the Notes and anytypeobjs_ lists are always written
        (possibly empty) as ``[...]`` literals.
        """
        if self.AutomaticRecoveryAction is not None:
            showIndent(outfile, level)
            outfile.write('AutomaticRecoveryAction=%d,\n' % self.AutomaticRecoveryAction)
        if self.AutomaticShutdownAction is not None:
            showIndent(outfile, level)
            outfile.write('AutomaticShutdownAction=%d,\n' % self.AutomaticShutdownAction)
        if self.AutomaticStartupAction is not None:
            showIndent(outfile, level)
            outfile.write('AutomaticStartupAction=%d,\n' % self.AutomaticStartupAction)
        if self.AutomaticStartupActionDelay is not None:
            showIndent(outfile, level)
            outfile.write('AutomaticStartupActionDelay=model_.AutomaticStartupActionDelay(\n')
            self.AutomaticStartupActionDelay.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.AutomaticStartupActionSequenceNumber is not None:
            showIndent(outfile, level)
            outfile.write('AutomaticStartupActionSequenceNumber=model_.AutomaticStartupActionSequenceNumber(\n')
            self.AutomaticStartupActionSequenceNumber.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Caption is not None:
            showIndent(outfile, level)
            outfile.write('Caption=model_.Caption(\n')
            self.Caption.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.ConfigurationDataRoot is not None:
            showIndent(outfile, level)
            outfile.write('ConfigurationDataRoot=model_.ConfigurationDataRoot(\n')
            self.ConfigurationDataRoot.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.ConfigurationFile is not None:
            showIndent(outfile, level)
            outfile.write('ConfigurationFile=model_.ConfigurationFile(\n')
            self.ConfigurationFile.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.ConfigurationID is not None:
            showIndent(outfile, level)
            outfile.write('ConfigurationID=model_.ConfigurationID(\n')
            self.ConfigurationID.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.CreationTime is not None:
            showIndent(outfile, level)
            outfile.write('CreationTime=model_.CreationTime(\n')
            self.CreationTime.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Description is not None:
            showIndent(outfile, level)
            outfile.write('Description=model_.Description(\n')
            self.Description.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.ElementName is not None:
            showIndent(outfile, level)
            outfile.write('ElementName=model_.ElementName(\n')
            self.ElementName.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.InstanceID is not None:
            showIndent(outfile, level)
            outfile.write('InstanceID=model_.InstanceID(\n')
            self.InstanceID.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.LogDataRoot is not None:
            showIndent(outfile, level)
            outfile.write('LogDataRoot=model_.LogDataRoot(\n')
            self.LogDataRoot.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('Notes=[\n')
        level += 1
        for Notes_ in self.Notes:
            showIndent(outfile, level)
            outfile.write('model_.Notes(\n')
            Notes_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        if self.RecoveryFile is not None:
            showIndent(outfile, level)
            outfile.write('RecoveryFile=model_.RecoveryFile(\n')
            self.RecoveryFile.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.SnapshotDataRoot is not None:
            showIndent(outfile, level)
            outfile.write('SnapshotDataRoot=model_.SnapshotDataRoot(\n')
            self.SnapshotDataRoot.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.SuspendDataRoot is not None:
            showIndent(outfile, level)
            outfile.write('SuspendDataRoot=model_.SuspendDataRoot(\n')
            self.SuspendDataRoot.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.SwapFileDataRoot is not None:
            showIndent(outfile, level)
            outfile.write('SwapFileDataRoot=model_.SwapFileDataRoot(\n')
            self.SwapFileDataRoot.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.VirtualSystemIdentifier is not None:
            showIndent(outfile, level)
            outfile.write('VirtualSystemIdentifier=model_.VirtualSystemIdentifier(\n')
            self.VirtualSystemIdentifier.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.VirtualSystemType is not None:
            showIndent(outfile, level)
            outfile.write('VirtualSystemType=model_.VirtualSystemType(\n')
            self.VirtualSystemType.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        """Populate this object from an ElementTree element ``node``."""
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            # Strip any '{uri}' prefix from the tag to get the local name.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        """Capture all XML attributes into anyAttributes_ and record xsi:type."""
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch one child element ``child_`` to the matching attribute.

        Integer children are parsed and validated inline; typed children
        are built via the appropriate factory (get_class_obj_ consults an
        xsi:type override where applicable); unrecognized children fall
        through to gds_build_any and are collected in anytypeobjs_.
        (Python 2 ``except X, e`` syntax is used, matching the rest of
        this generated module.)
        """
        if nodeName_ == 'AutomaticRecoveryAction':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError), exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'AutomaticRecoveryAction')
            self.AutomaticRecoveryAction = ival_
            self.validate_AutomaticRecoveryAction(self.AutomaticRecoveryAction) # validate type AutomaticRecoveryAction
        elif nodeName_ == 'AutomaticShutdownAction':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError), exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'AutomaticShutdownAction')
            self.AutomaticShutdownAction = ival_
            self.validate_AutomaticShutdownAction(self.AutomaticShutdownAction) # validate type AutomaticShutdownAction
        elif nodeName_ == 'AutomaticStartupAction':
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError), exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'AutomaticStartupAction')
            self.AutomaticStartupAction = ival_
            self.validate_AutomaticStartupAction(self.AutomaticStartupAction) # validate type AutomaticStartupAction
        elif nodeName_ == 'AutomaticStartupActionDelay':
            obj_ = cimDateTime.factory()
            obj_.build(child_)
            self.set_AutomaticStartupActionDelay(obj_)
        elif nodeName_ == 'AutomaticStartupActionSequenceNumber':
            obj_ = cimUnsignedShort.factory()
            obj_.build(child_)
            self.set_AutomaticStartupActionSequenceNumber(obj_)
        elif nodeName_ == 'Caption':
            obj_ = Caption.factory()
            obj_.build(child_)
            self.set_Caption(obj_)
        elif nodeName_ == 'ConfigurationDataRoot':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_ConfigurationDataRoot(obj_)
        elif nodeName_ == 'ConfigurationFile':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_ConfigurationFile(obj_)
        elif nodeName_ == 'ConfigurationID':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_ConfigurationID(obj_)
        elif nodeName_ == 'CreationTime':
            obj_ = cimDateTime.factory()
            obj_.build(child_)
            self.set_CreationTime(obj_)
        elif nodeName_ == 'Description':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_Description(obj_)
        elif nodeName_ == 'ElementName':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_ElementName(obj_)
        elif nodeName_ == 'InstanceID':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_InstanceID(obj_)
        elif nodeName_ == 'LogDataRoot':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_LogDataRoot(obj_)
        elif nodeName_ == 'Notes':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.Notes.append(obj_)
        elif nodeName_ == 'RecoveryFile':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_RecoveryFile(obj_)
        elif nodeName_ == 'SnapshotDataRoot':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_SnapshotDataRoot(obj_)
        elif nodeName_ == 'SuspendDataRoot':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_SuspendDataRoot(obj_)
        elif nodeName_ == 'SwapFileDataRoot':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_SwapFileDataRoot(obj_)
        elif nodeName_ == 'VirtualSystemIdentifier':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_VirtualSystemIdentifier(obj_)
        elif nodeName_ == 'VirtualSystemType':
            class_obj_ = self.get_class_obj_(child_, cimString)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.set_VirtualSystemType(obj_)
        else:
            obj_ = self.gds_build_any(child_, 'CIM_VirtualSystemSettingData_Type')
            if obj_ is not None:
                self.add_anytypeobjs_(obj_)
# end class CIM_VirtualSystemSettingData_Type
class CIM_ResourceAllocationSettingData_Type(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, Address=None, AddressOnParent=None, AllocationUnits=None, AutomaticAllocation=None, AutomaticDeallocation=None, Caption=None, Connection=None, ConsumerVisibility=None, Description=None, ElementName=None, HostResource=None, InstanceID=None, Limit=None, MappingBehavior=None, OtherResourceType=None, Parent=None, PoolID=None, Reservation=None, ResourceSubType=None, ResourceType=None, VirtualQuantity=None, VirtualQuantityUnits=None, Weight=None, anytypeobjs_=None, extensiontype_=None):
        """Container for one CIM_ResourceAllocationSettingData element.

        All children default to None.  List-valued children (Connection,
        HostResource, anytypeobjs_) get a fresh empty list per instance so
        no mutable default is shared.  ``extensiontype_`` carries an
        optional xsi:type override used by export()/exportAttributes().
        """
        self.Address = Address
        self.AddressOnParent = AddressOnParent
        self.AllocationUnits = AllocationUnits
        self.AutomaticAllocation = AutomaticAllocation
        self.AutomaticDeallocation = AutomaticDeallocation
        self.Caption = Caption
        if Connection is None:
            self.Connection = []
        else:
            self.Connection = Connection
        self.ConsumerVisibility = ConsumerVisibility
        self.Description = Description
        self.ElementName = ElementName
        if HostResource is None:
            self.HostResource = []
        else:
            self.HostResource = HostResource
        self.InstanceID = InstanceID
        self.Limit = Limit
        self.MappingBehavior = MappingBehavior
        self.OtherResourceType = OtherResourceType
        self.Parent = Parent
        self.PoolID = PoolID
        self.Reservation = Reservation
        self.ResourceSubType = ResourceSubType
        self.ResourceType = ResourceType
        self.VirtualQuantity = VirtualQuantity
        self.VirtualQuantityUnits = VirtualQuantityUnits
        self.Weight = Weight
        if anytypeobjs_ is None:
            self.anytypeobjs_ = []
        else:
            self.anytypeobjs_ = anytypeobjs_
        self.anyAttributes_ = {}
        self.extensiontype_ = extensiontype_
def factory(*args_, **kwargs_):
if CIM_ResourceAllocationSettingData_Type.subclass:
return CIM_ResourceAllocationSettingData_Type.subclass(*args_, **kwargs_)
else:
return CIM_ResourceAllocationSettingData_Type(*args_, **kwargs_)
factory = staticmethod(factory)
    # --- Generated accessors (generateDS style) ---------------------------
    # One trivial get_/set_ pair per schema element; validate_* hooks exist
    # for the xs:unsignedShort restrictions but no facet checks were
    # generated, so they are no-ops.  List-valued elements (Connection,
    # HostResource, anytypeobjs_) additionally get add_/insert_ helpers.
    def get_Address(self): return self.Address
    def set_Address(self, Address): self.Address = Address
    def get_AddressOnParent(self): return self.AddressOnParent
    def set_AddressOnParent(self, AddressOnParent): self.AddressOnParent = AddressOnParent
    def get_AllocationUnits(self): return self.AllocationUnits
    def set_AllocationUnits(self, AllocationUnits): self.AllocationUnits = AllocationUnits
    def get_AutomaticAllocation(self): return self.AutomaticAllocation
    def set_AutomaticAllocation(self, AutomaticAllocation): self.AutomaticAllocation = AutomaticAllocation
    def get_AutomaticDeallocation(self): return self.AutomaticDeallocation
    def set_AutomaticDeallocation(self, AutomaticDeallocation): self.AutomaticDeallocation = AutomaticDeallocation
    def get_Caption(self): return self.Caption
    def set_Caption(self, Caption): self.Caption = Caption
    def get_Connection(self): return self.Connection
    def set_Connection(self, Connection): self.Connection = Connection
    def add_Connection(self, value): self.Connection.append(value)
    # NOTE(review): the generated insert_* helpers overwrite the item at
    # ``index`` (item assignment) rather than list.insert()-ing.
    def insert_Connection(self, index, value): self.Connection[index] = value
    def get_ConsumerVisibility(self): return self.ConsumerVisibility
    def set_ConsumerVisibility(self, ConsumerVisibility): self.ConsumerVisibility = ConsumerVisibility
    def validate_ConsumerVisibility(self, value):
        # Validate type ConsumerVisibility, a restriction on xs:unsignedShort.
        # No constraints were generated for this restriction; intentionally a no-op.
        pass
    def get_Description(self): return self.Description
    def set_Description(self, Description): self.Description = Description
    def get_ElementName(self): return self.ElementName
    def set_ElementName(self, ElementName): self.ElementName = ElementName
    def get_HostResource(self): return self.HostResource
    def set_HostResource(self, HostResource): self.HostResource = HostResource
    def add_HostResource(self, value): self.HostResource.append(value)
    def insert_HostResource(self, index, value): self.HostResource[index] = value
    def get_InstanceID(self): return self.InstanceID
    def set_InstanceID(self, InstanceID): self.InstanceID = InstanceID
    def get_Limit(self): return self.Limit
    def set_Limit(self, Limit): self.Limit = Limit
    def get_MappingBehavior(self): return self.MappingBehavior
    def set_MappingBehavior(self, MappingBehavior): self.MappingBehavior = MappingBehavior
    def validate_MappingBehavior(self, value):
        # Validate type MappingBehavior, a restriction on xs:unsignedShort.
        # No constraints were generated for this restriction; intentionally a no-op.
        pass
    def get_OtherResourceType(self): return self.OtherResourceType
    def set_OtherResourceType(self, OtherResourceType): self.OtherResourceType = OtherResourceType
    def get_Parent(self): return self.Parent
    def set_Parent(self, Parent): self.Parent = Parent
    def get_PoolID(self): return self.PoolID
    def set_PoolID(self, PoolID): self.PoolID = PoolID
    def get_Reservation(self): return self.Reservation
    def set_Reservation(self, Reservation): self.Reservation = Reservation
    def get_ResourceSubType(self): return self.ResourceSubType
    def set_ResourceSubType(self, ResourceSubType): self.ResourceSubType = ResourceSubType
    def get_ResourceType(self): return self.ResourceType
    def set_ResourceType(self, ResourceType): self.ResourceType = ResourceType
    def validate_ResourceType(self, value):
        # Validate type ResourceType, a restriction on xs:unsignedShort.
        # No constraints were generated for this restriction; intentionally a no-op.
        pass
    def get_VirtualQuantity(self): return self.VirtualQuantity
    def set_VirtualQuantity(self, VirtualQuantity): self.VirtualQuantity = VirtualQuantity
    def get_VirtualQuantityUnits(self): return self.VirtualQuantityUnits
    def set_VirtualQuantityUnits(self, VirtualQuantityUnits): self.VirtualQuantityUnits = VirtualQuantityUnits
    def get_Weight(self): return self.Weight
    def set_Weight(self, Weight): self.Weight = Weight
    def get_anytypeobjs_(self): return self.anytypeobjs_
    def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
    def add_anytypeobjs_(self, value): self.anytypeobjs_.append(value)
def insert_anytypeobjs_(self, index, value): self._anytypeobjs_[index] = value
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def get_extensiontype_(self): return self.extensiontype_
def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def export(self, outfile, level, namespace_='ovf:', name_='CIM_ResourceAllocationSettingData_Type', namespacedef_=''):
        """Write this object to ``outfile`` as an XML element.

        ``level`` is the indentation depth, ``namespace_`` the tag prefix,
        ``name_`` the element name, and ``namespacedef_`` an optional
        string of xmlns declarations appended to the start tag.  Emits a
        self-closing tag when hasContent_() reports no populated children.
        """
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='CIM_ResourceAllocationSettingData_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='CIM_ResourceAllocationSettingData_Type'):
        """Write the wildcard XML attributes of this element.

        Attribute names may arrive in ElementTree's ``{uri}local`` form:
        xsi-namespaced names are rewritten as ``xsi:local``; names in other
        namespaces get a synthetic ``xmlns:yyyN`` declaration (except the
        built-in XML namespace, written verbatim); plain names are written
        as-is.  ``already_processed`` suppresses duplicates across calls.
        Finally appends an ``xsi:type`` attribute when ``extensiontype_``
        is set.
        """
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # '{xsi-uri}local' -> 'xsi:local'
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Foreign namespace: declare a throwaway prefix.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.append('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
        pass
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='CIM_ResourceAllocationSettingData_Type', fromsubclass_=False):
        """Write the child elements in schema order.

        Integer-valued children (ConsumerVisibility, MappingBehavior,
        ResourceType) are formatted inline via gds_format_integer;
        object-valued children delegate to their own export(); list
        children (Connection, HostResource, anytypeobjs_) emit one element
        per item.
        """
        if self.Address is not None:
            self.Address.export(outfile, level, namespace_, name_='Address')
        if self.AddressOnParent is not None:
            self.AddressOnParent.export(outfile, level, namespace_, name_='AddressOnParent')
        if self.AllocationUnits is not None:
            self.AllocationUnits.export(outfile, level, namespace_, name_='AllocationUnits')
        if self.AutomaticAllocation is not None:
            self.AutomaticAllocation.export(outfile, level, namespace_, name_='AutomaticAllocation')
        if self.AutomaticDeallocation is not None:
            self.AutomaticDeallocation.export(outfile, level, namespace_, name_='AutomaticDeallocation')
        if self.Caption is not None:
            self.Caption.export(outfile, level, namespace_, name_='Caption')
        for Connection_ in self.Connection:
            Connection_.export(outfile, level, namespace_, name_='Connection')
        if self.ConsumerVisibility is not None:
            showIndent(outfile, level)
            outfile.write('<%sConsumerVisibility>%s</%sConsumerVisibility>\n' % (namespace_, self.gds_format_integer(self.ConsumerVisibility, input_name='ConsumerVisibility'), namespace_))
        if self.Description is not None:
            self.Description.export(outfile, level, namespace_, name_='Description')
        if self.ElementName is not None:
            self.ElementName.export(outfile, level, namespace_, name_='ElementName', )
        for HostResource_ in self.HostResource:
            HostResource_.export(outfile, level, namespace_, name_='HostResource')
        if self.InstanceID is not None:
            self.InstanceID.export(outfile, level, namespace_, name_='InstanceID', )
        if self.Limit is not None:
            self.Limit.export(outfile, level, namespace_, name_='Limit')
        if self.MappingBehavior is not None:
            showIndent(outfile, level)
            outfile.write('<%sMappingBehavior>%s</%sMappingBehavior>\n' % (namespace_, self.gds_format_integer(self.MappingBehavior, input_name='MappingBehavior'), namespace_))
        if self.OtherResourceType is not None:
            self.OtherResourceType.export(outfile, level, namespace_, name_='OtherResourceType')
        if self.Parent is not None:
            self.Parent.export(outfile, level, namespace_, name_='Parent')
        if self.PoolID is not None:
            self.PoolID.export(outfile, level, namespace_, name_='PoolID')
        if self.Reservation is not None:
            self.Reservation.export(outfile, level, namespace_, name_='Reservation')
        if self.ResourceSubType is not None:
            self.ResourceSubType.export(outfile, level, namespace_, name_='ResourceSubType')
        if self.ResourceType is not None:
            showIndent(outfile, level)
            outfile.write('<%sResourceType>%s</%sResourceType>\n' % (namespace_, self.gds_format_integer(self.ResourceType, input_name='ResourceType'), namespace_))
        if self.VirtualQuantity is not None:
            self.VirtualQuantity.export(outfile, level, namespace_, name_='VirtualQuantity')
        if self.VirtualQuantityUnits is not None:
            self.VirtualQuantityUnits.export(outfile, level, namespace_, name_='VirtualQuantityUnits')
        if self.Weight is not None:
            self.Weight.export(outfile, level, namespace_, name_='Weight')
        for obj_ in self.anytypeobjs_:
            obj_.export(outfile, level, namespace_)
def hasContent_(self):
if (
self.Address is not None or
self.AddressOnParent is not None or
self.AllocationUnits is not None or
self.AutomaticAllocation is not None or
self.AutomaticDeallocation is not None or
self.Caption is not None or
self.Connection or
self.ConsumerVisibility is not None or
self.Description is not None or
self.ElementName is not None or
self.HostResource or
self.InstanceID is not None or
self.Limit is not None or
self.MappingBehavior is not None or
self.OtherResourceType is not None or
self.Parent is not None or
self.PoolID is not None or
self.Reservation is not None or
self.ResourceSubType is not None or
self.ResourceType is not None or
self.VirtualQuantity is not None or
self.VirtualQuantityUnits is not None or
self.Weight is not None or
self.anytypeobjs_
):
return True
else:
return False
    def exportLiteral(self, outfile, level, name_='CIM_ResourceAllocationSettingData_Type'):
        """Write this object as Python constructor-literal text."""
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        """Write each wildcard attribute as a ``name = "value",`` line."""
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        """Write each present child member as a Python-literal keyword
        argument. Object-valued members recurse via exportLiteral;
        integer members (ConsumerVisibility, MappingBehavior,
        ResourceType) are written inline with %d; list members are
        written as list literals. Emission order matches the schema."""
        if self.Address is not None:
            showIndent(outfile, level)
            outfile.write('Address=model_.Address(\n')
            self.Address.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.AddressOnParent is not None:
            showIndent(outfile, level)
            outfile.write('AddressOnParent=model_.AddressOnParent(\n')
            self.AddressOnParent.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.AllocationUnits is not None:
            showIndent(outfile, level)
            outfile.write('AllocationUnits=model_.AllocationUnits(\n')
            self.AllocationUnits.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.AutomaticAllocation is not None:
            showIndent(outfile, level)
            outfile.write('AutomaticAllocation=model_.AutomaticAllocation(\n')
            self.AutomaticAllocation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.AutomaticDeallocation is not None:
            showIndent(outfile, level)
            outfile.write('AutomaticDeallocation=model_.AutomaticDeallocation(\n')
            self.AutomaticDeallocation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Caption is not None:
            showIndent(outfile, level)
            outfile.write('Caption=model_.Caption(\n')
            self.Caption.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        # Connection is a list: always emitted, possibly empty.
        showIndent(outfile, level)
        outfile.write('Connection=[\n')
        level += 1
        for Connection_ in self.Connection:
            showIndent(outfile, level)
            outfile.write('model_.Connection(\n')
            Connection_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        if self.ConsumerVisibility is not None:
            showIndent(outfile, level)
            outfile.write('ConsumerVisibility=%d,\n' % self.ConsumerVisibility)
        if self.Description is not None:
            showIndent(outfile, level)
            outfile.write('Description=model_.Description(\n')
            self.Description.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.ElementName is not None:
            showIndent(outfile, level)
            outfile.write('ElementName=model_.ElementName(\n')
            self.ElementName.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        # HostResource is a list: always emitted, possibly empty.
        showIndent(outfile, level)
        outfile.write('HostResource=[\n')
        level += 1
        for HostResource_ in self.HostResource:
            showIndent(outfile, level)
            outfile.write('model_.HostResource(\n')
            HostResource_.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        if self.InstanceID is not None:
            showIndent(outfile, level)
            outfile.write('InstanceID=model_.InstanceID(\n')
            self.InstanceID.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Limit is not None:
            showIndent(outfile, level)
            outfile.write('Limit=model_.Limit(\n')
            self.Limit.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.MappingBehavior is not None:
            showIndent(outfile, level)
            outfile.write('MappingBehavior=%d,\n' % self.MappingBehavior)
        if self.OtherResourceType is not None:
            showIndent(outfile, level)
            outfile.write('OtherResourceType=model_.OtherResourceType(\n')
            self.OtherResourceType.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Parent is not None:
            showIndent(outfile, level)
            outfile.write('Parent=model_.Parent(\n')
            self.Parent.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.PoolID is not None:
            showIndent(outfile, level)
            outfile.write('PoolID=model_.PoolID(\n')
            self.PoolID.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Reservation is not None:
            showIndent(outfile, level)
            outfile.write('Reservation=model_.Reservation(\n')
            self.Reservation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.ResourceSubType is not None:
            showIndent(outfile, level)
            outfile.write('ResourceSubType=model_.ResourceSubType(\n')
            self.ResourceSubType.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.ResourceType is not None:
            showIndent(outfile, level)
            outfile.write('ResourceType=%d,\n' % self.ResourceType)
        if self.VirtualQuantity is not None:
            showIndent(outfile, level)
            outfile.write('VirtualQuantity=model_.VirtualQuantity(\n')
            self.VirtualQuantity.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.VirtualQuantityUnits is not None:
            showIndent(outfile, level)
            outfile.write('VirtualQuantityUnits=model_.VirtualQuantityUnits(\n')
            self.VirtualQuantityUnits.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Weight is not None:
            showIndent(outfile, level)
            outfile.write('Weight=model_.Weight(\n')
            self.Weight.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        # Collected wildcard (xs:any) children, emitted last.
        showIndent(outfile, level)
        outfile.write('anytypeobjs_=[\n')
        level += 1
        for anytypeobjs_ in self.anytypeobjs_:
            anytypeobjs_.exportLiteral(outfile, level)
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
value = find_attr_value_('xsi:type', node)
if value is not None and 'xsi:type' not in already_processed:
already_processed.append('xsi:type')
self.extensiontype_ = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Address':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Address(obj_)
elif nodeName_ == 'AddressOnParent':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AddressOnParent(obj_)
elif nodeName_ == 'AllocationUnits':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AllocationUnits(obj_)
elif nodeName_ == 'AutomaticAllocation':
class_obj_ = self.get_class_obj_(child_, cimBoolean)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AutomaticAllocation(obj_)
elif nodeName_ == 'AutomaticDeallocation':
class_obj_ = self.get_class_obj_(child_, cimBoolean)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_AutomaticDeallocation(obj_)
elif nodeName_ == 'Caption':
obj_ = Caption.factory()
obj_.build(child_)
self.set_Caption(obj_)
elif nodeName_ == 'Connection':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.Connection.append(obj_)
elif nodeName_ == 'ConsumerVisibility':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError), exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'ConsumerVisibility')
self.ConsumerVisibility = ival_
self.validate_ConsumerVisibility(self.ConsumerVisibility) # validate type ConsumerVisibility
elif nodeName_ == 'Description':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Description(obj_)
elif nodeName_ == 'ElementName':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ElementName(obj_)
elif nodeName_ == 'HostResource':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.HostResource.append(obj_)
elif nodeName_ == 'InstanceID':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_InstanceID(obj_)
elif nodeName_ == 'Limit':
obj_ = cimUnsignedLong.factory()
obj_.build(child_)
self.set_Limit(obj_)
elif nodeName_ == 'MappingBehavior':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError), exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'MappingBehavior')
self.MappingBehavior = ival_
self.validate_MappingBehavior(self.MappingBehavior) # validate type MappingBehavior
elif nodeName_ == 'OtherResourceType':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_OtherResourceType(obj_)
elif nodeName_ == 'Parent':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Parent(obj_)
elif nodeName_ == 'PoolID':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_PoolID(obj_)
elif nodeName_ == 'Reservation':
obj_ = cimUnsignedLong.factory()
obj_.build(child_)
self.set_Reservation(obj_)
elif nodeName_ == 'ResourceSubType':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_ResourceSubType(obj_)
elif nodeName_ == 'ResourceType':
sval_ = child_.text
try:
ival_ = int(sval_)
except (TypeError, ValueError), exp:
raise_parse_error(child_, 'requires integer: %s' % exp)
ival_ = self.gds_validate_integer(ival_, node, 'ResourceType')
self.ResourceType = ival_
self.validate_ResourceType(self.ResourceType) # validate type ResourceType
elif nodeName_ == 'VirtualQuantity':
obj_ = cimUnsignedLong.factory()
obj_.build(child_)
self.set_VirtualQuantity(obj_)
elif nodeName_ == 'VirtualQuantityUnits':
class_obj_ = self.get_class_obj_(child_, cimString)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_VirtualQuantityUnits(obj_)
elif nodeName_ == 'Weight':
class_obj_ = self.get_class_obj_(child_, cimUnsignedInt)
obj_ = class_obj_.factory()
obj_.build(child_)
self.set_Weight(obj_)
else:
obj_ = self.gds_build_any(child_, 'CIM_ResourceAllocationSettingData_Type')
if obj_ is not None:
self.add_anytypeobjs_(obj_)
# end class CIM_ResourceAllocationSettingData_Type
class MsgType(GeneratedsSuper):
    """A localizable OVF message element: a string value carried as the
    element's text content, plus an optional ``msgid`` attribute
    identifying the message in a string-resource bundle."""
    subclass = None
    superclass = None
    def __init__(self, msgid=None, valueOf_=None):
        # msgid: XML attribute; valueOf_: the element's text content.
        self.msgid = _cast(None, msgid)
        self.valueOf_ = valueOf_
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if MsgType.subclass:
            return MsgType.subclass(*args_, **kwargs_)
        else:
            return MsgType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_msgid(self): return self.msgid
    def set_msgid(self, msgid): self.msgid = msgid
    def get_valueOf_(self): return self.valueOf_
    def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='MsgType', namespacedef_=''):
        # Serialize as XML; the text value is written inline (no
        # indentation/newline) and the tag self-closes when empty.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='MsgType')
        if self.hasContent_():
            outfile.write('>')
            outfile.write(str(self.valueOf_).encode(ExternalEncoding))
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='MsgType'):
        # Wildcard attributes first (with namespace re-prefixing),
        # then the declared msgid attribute.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute -> emit with xsi: prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    # Other namespaced attribute: declare a synthetic
                    # xmlns:yyyN prefix unless it is the xml: namespace.
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.msgid is not None and 'msgid' not in already_processed:
            already_processed.append('msgid')
            outfile.write(' msgid=%s' % (self.gds_format_string(quote_attrib(self.msgid).encode(ExternalEncoding), input_name='msgid'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='MsgType', fromsubclass_=False):
        # MsgType has no child elements, only text content.
        pass
    def hasContent_(self):
        if (
            self.valueOf_
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='MsgType'):
        # Python-literal form: attributes, then the raw text value.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
        showIndent(outfile, level)
        outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.msgid is not None and 'msgid' not in already_processed:
            already_processed.append('msgid')
            showIndent(outfile, level)
            outfile.write('msgid = "%s",\n' % (self.msgid,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        # Attributes, then text content, then (no-op) children.
        self.buildAttributes(node, node.attrib, [])
        self.valueOf_ = get_all_text_(node)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('msgid', node)
        if value is not None and 'msgid' not in already_processed:
            already_processed.append('msgid')
            self.msgid = value
        # Remaining attributes are kept as wildcard content.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class MsgType
class IconType(GeneratedsSuper):
    """Product icon reference: a file reference plus MIME type and pixel
    dimensions, all carried as XML attributes (no child elements).

    Fixes: the two integer-attribute parsers used the Python-2-only
    'except ValueError, exp' spelling; the 'as' form is equivalent and
    accepted by Python 2.6+ and Python 3. hasContent_ was an always-
    false 'if ():' construct and is now an explicit 'return False'.
    """
    subclass = None
    superclass = None
    def __init__(self, mimeType=None, width=None, fileRef=None, height=None):
        self.mimeType = _cast(None, mimeType)
        self.width = _cast(int, width)
        self.fileRef = _cast(None, fileRef)
        self.height = _cast(int, height)
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if IconType.subclass:
            return IconType.subclass(*args_, **kwargs_)
        else:
            return IconType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_mimeType(self): return self.mimeType
    def set_mimeType(self, mimeType): self.mimeType = mimeType
    def get_width(self): return self.width
    def set_width(self, width): self.width = width
    def get_fileRef(self): return self.fileRef
    def set_fileRef(self, fileRef): self.fileRef = fileRef
    def get_height(self): return self.height
    def set_height(self, height): self.height = height
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='IconType', namespacedef_=''):
        # Serialize as XML; since hasContent_ is always False this
        # always produces a self-closing, attributes-only tag.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='IconType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='IconType'):
        # Wildcard attributes first (with namespace re-prefixing),
        # then the declared mimeType/width/fileRef/height attributes.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.mimeType is not None and 'mimeType' not in already_processed:
            already_processed.append('mimeType')
            outfile.write(' mimeType=%s' % (self.gds_format_string(quote_attrib(self.mimeType).encode(ExternalEncoding), input_name='mimeType'), ))
        if self.width is not None and 'width' not in already_processed:
            already_processed.append('width')
            outfile.write(' width="%s"' % self.gds_format_integer(self.width, input_name='width'))
        if self.fileRef is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            outfile.write(' fileRef=%s' % (self.gds_format_string(quote_attrib(self.fileRef).encode(ExternalEncoding), input_name='fileRef'), ))
        if self.height is not None and 'height' not in already_processed:
            already_processed.append('height')
            outfile.write(' height="%s"' % self.gds_format_integer(self.height, input_name='height'))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='IconType', fromsubclass_=False):
        pass
    def hasContent_(self):
        # IconType carries data only in attributes, never element content.
        return False
    def exportLiteral(self, outfile, level, name_='IconType'):
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.mimeType is not None and 'mimeType' not in already_processed:
            already_processed.append('mimeType')
            showIndent(outfile, level)
            outfile.write('mimeType = "%s",\n' % (self.mimeType,))
        if self.width is not None and 'width' not in already_processed:
            already_processed.append('width')
            showIndent(outfile, level)
            outfile.write('width = %d,\n' % (self.width,))
        if self.fileRef is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            showIndent(outfile, level)
            outfile.write('fileRef = "%s",\n' % (self.fileRef,))
        if self.height is not None and 'height' not in already_processed:
            already_processed.append('height')
            showIndent(outfile, level)
            outfile.write('height = %d,\n' % (self.height,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        pass
    def build(self, node):
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('mimeType', node)
        if value is not None and 'mimeType' not in already_processed:
            already_processed.append('mimeType')
            self.mimeType = value
        value = find_attr_value_('width', node)
        if value is not None and 'width' not in already_processed:
            already_processed.append('width')
            try:
                self.width = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('fileRef', node)
        if value is not None and 'fileRef' not in already_processed:
            already_processed.append('fileRef')
            self.fileRef = value
        value = find_attr_value_('height', node)
        if value is not None and 'height' not in already_processed:
            already_processed.append('height')
            try:
                self.height = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        # Remaining attributes are kept as wildcard content.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class IconType
class PropertyType(GeneratedsSuper):
    """An OVF deployment property.

    Attributes: key (property identifier), type (property type),
    qualifiers (comma-separated type qualifiers), userConfigurable
    (whether the value may be set during installation), value (default
    value), password (whether the value should be obscured during
    deployment). Children: optional Label and Description messages and
    a list of per-configuration Value elements."""
    subclass = None
    superclass = None
    def __init__(self, userConfigurable=False, value='', key=None, password=False, type_=None, qualifiers=None, Label=None, Description=None, Value=None):
        self.userConfigurable = _cast(bool, userConfigurable)
        self.value = _cast(None, value)
        self.key = _cast(None, key)
        self.password = _cast(bool, password)
        self.type_ = _cast(None, type_)
        self.qualifiers = _cast(None, qualifiers)
        self.Label = Label
        self.Description = Description
        # Value is repeatable; default to a fresh list per instance.
        if Value is None:
            self.Value = []
        else:
            self.Value = Value
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if PropertyType.subclass:
            return PropertyType.subclass(*args_, **kwargs_)
        else:
            return PropertyType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Label(self): return self.Label
    def set_Label(self, Label): self.Label = Label
    def get_Description(self): return self.Description
    def set_Description(self, Description): self.Description = Description
    def get_Value(self): return self.Value
    def set_Value(self, Value): self.Value = Value
    def add_Value(self, value): self.Value.append(value)
    def insert_Value(self, index, value): self.Value[index] = value
    def get_userConfigurable(self): return self.userConfigurable
    def set_userConfigurable(self, userConfigurable): self.userConfigurable = userConfigurable
    def get_value(self): return self.value
    def set_value(self, value): self.value = value
    def get_key(self): return self.key
    def set_key(self, key): self.key = key
    def get_password(self): return self.password
    def set_password(self, password): self.password = password
    def get_type(self): return self.type_
    def set_type(self, type_): self.type_ = type_
    def get_qualifiers(self): return self.qualifiers
    def set_qualifiers(self, qualifiers): self.qualifiers = qualifiers
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='PropertyType', namespacedef_=''):
        # Serialize as XML; self-close the tag when there are no children.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='PropertyType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='PropertyType'):
        # Wildcard attributes first (with namespace re-prefixing),
        # then the declared attributes.
        unique_counter = 0
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # Clark-notation xsi attribute -> emit with xsi: prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    # Other namespaced attribute: declare a synthetic
                    # xmlns:yyyN prefix unless it is the xml: namespace.
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.userConfigurable is not None and 'userConfigurable' not in already_processed:
            already_processed.append('userConfigurable')
            outfile.write(' userConfigurable="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.userConfigurable)), input_name='userConfigurable'))
        if self.value is not None and 'value' not in already_processed:
            already_processed.append('value')
            outfile.write(' value=%s' % (self.gds_format_string(quote_attrib(self.value).encode(ExternalEncoding), input_name='value'), ))
        if self.key is not None and 'key' not in already_processed:
            already_processed.append('key')
            outfile.write(' key=%s' % (self.gds_format_string(quote_attrib(self.key).encode(ExternalEncoding), input_name='key'), ))
        if self.password is not None and 'password' not in already_processed:
            already_processed.append('password')
            outfile.write(' password="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.password)), input_name='password'))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            outfile.write(' type=%s' % (self.gds_format_string(quote_attrib(self.type_).encode(ExternalEncoding), input_name='type'), ))
        if self.qualifiers is not None and 'qualifiers' not in already_processed:
            already_processed.append('qualifiers')
            outfile.write(' qualifiers=%s' % (self.gds_format_string(quote_attrib(self.qualifiers).encode(ExternalEncoding), input_name='qualifiers'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='PropertyType', fromsubclass_=False):
        if self.Label is not None:
            self.Label.export(outfile, level, namespace_, name_='Label')
        if self.Description is not None:
            self.Description.export(outfile, level, namespace_, name_='Description')
        for Value_ in self.Value:
            Value_.export(outfile, level, namespace_, name_='Value')
    def hasContent_(self):
        # True when any child element is present.
        if (
            self.Label is not None or
            self.Description is not None or
            self.Value
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='PropertyType'):
        # Python-literal (constructor-call) form of this instance.
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.userConfigurable is not None and 'userConfigurable' not in already_processed:
            already_processed.append('userConfigurable')
            showIndent(outfile, level)
            outfile.write('userConfigurable = %s,\n' % (self.userConfigurable,))
        if self.value is not None and 'value' not in already_processed:
            already_processed.append('value')
            showIndent(outfile, level)
            outfile.write('value = "%s",\n' % (self.value,))
        if self.key is not None and 'key' not in already_processed:
            already_processed.append('key')
            showIndent(outfile, level)
            outfile.write('key = "%s",\n' % (self.key,))
        if self.password is not None and 'password' not in already_processed:
            already_processed.append('password')
            showIndent(outfile, level)
            outfile.write('password = %s,\n' % (self.password,))
        if self.type_ is not None and 'type_' not in already_processed:
            already_processed.append('type_')
            showIndent(outfile, level)
            outfile.write('type_ = "%s",\n' % (self.type_,))
        if self.qualifiers is not None and 'qualifiers' not in already_processed:
            already_processed.append('qualifiers')
            showIndent(outfile, level)
            outfile.write('qualifiers = "%s",\n' % (self.qualifiers,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Label is not None:
            showIndent(outfile, level)
            outfile.write('Label=model_.Msg_Type(\n')
            self.Label.exportLiteral(outfile, level, name_='Label')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Description is not None:
            showIndent(outfile, level)
            outfile.write('Description=model_.Msg_Type(\n')
            self.Description.exportLiteral(outfile, level, name_='Description')
            showIndent(outfile, level)
            outfile.write('),\n')
        # Value is a list: always emitted, possibly empty.
        showIndent(outfile, level)
        outfile.write('Value=[\n')
        level += 1
        for Value_ in self.Value:
            showIndent(outfile, level)
            outfile.write('model_.PropertyConfigurationValue_Type(\n')
            Value_.exportLiteral(outfile, level, name_='PropertyConfigurationValue_Type')
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node):
        # Populate from an ElementTree node: attributes, then children.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Boolean attributes accept 'true'/'1'/'false'/'0' only.
        value = find_attr_value_('userConfigurable', node)
        if value is not None and 'userConfigurable' not in already_processed:
            already_processed.append('userConfigurable')
            if value in ('true', '1'):
                self.userConfigurable = True
            elif value in ('false', '0'):
                self.userConfigurable = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.append('value')
            self.value = value
        value = find_attr_value_('key', node)
        if value is not None and 'key' not in already_processed:
            already_processed.append('key')
            self.key = value
        value = find_attr_value_('password', node)
        if value is not None and 'password' not in already_processed:
            already_processed.append('password')
            if value in ('true', '1'):
                self.password = True
            elif value in ('false', '0'):
                self.password = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('type', node)
        if value is not None and 'type' not in already_processed:
            already_processed.append('type')
            self.type_ = value
        value = find_attr_value_('qualifiers', node)
        if value is not None and 'qualifiers' not in already_processed:
            already_processed.append('qualifiers')
            self.qualifiers = value
        # Remaining attributes are kept as wildcard content.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch a parsed child element into the matching member.
        if nodeName_ == 'Label':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_Label(obj_)
        elif nodeName_ == 'Description':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_Description(obj_)
        elif nodeName_ == 'Value':
            obj_ = PropertyConfigurationValue_Type.factory()
            obj_.build(child_)
            self.Value.append(obj_)
# end class PropertyType
class NetworkType(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, name=None, Description=None):
        # 'name' is an XML attribute; 'Description' an optional child element.
        self.name = _cast(None, name)
        self.Description = Description
        self.anyAttributes_ = {}
def factory(*args_, **kwargs_):
if NetworkType.subclass:
return NetworkType.subclass(*args_, **kwargs_)
else:
return NetworkType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_Description(self): return self.Description
def set_Description(self, Description): self.Description = Description
def get_name(self): return self.name
def set_name(self, name): self.name = name
def get_anyAttributes_(self): return self.anyAttributes_
def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
def export(self, outfile, level, namespace_='ovf:', name_='NetworkType', namespacedef_=''):
showIndent(outfile, level)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = []
self.exportAttributes(outfile, level, already_processed, namespace_, name_='NetworkType')
if self.hasContent_():
outfile.write('>\n')
self.exportChildren(outfile, level + 1, namespace_, name_)
showIndent(outfile, level)
outfile.write('</%s%s>\n' % (namespace_, name_))
else:
outfile.write('/>\n')
def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='NetworkType'):
unique_counter = 0
for name, value in self.anyAttributes_.items():
xsinamespaceprefix = 'xsi'
xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
xsinamespace2 = '{%s}' % (xsinamespace1, )
if name.startswith(xsinamespace2):
name1 = name[len(xsinamespace2):]
name2 = '%s:%s' % (xsinamespaceprefix, name1, )
if name2 not in already_processed:
already_processed.append(name2)
outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
else:
mo = re_.match(Namespace_extract_pat_, name)
if mo is not None:
namespace, name = mo.group(1, 2)
if name not in already_processed:
already_processed.append(name)
if namespace == 'http://www.w3.org/XML/1998/namespace':
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
else:
unique_counter += 1
outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
else:
if name not in already_processed:
already_processed.append(name)
outfile.write(' %s=%s' % (name, quote_attrib(value), ))
if self.name is not None and 'name' not in already_processed:
already_processed.append('name')
outfile.write(' name=%s' % (self.gds_format_string(quote_attrib(self.name).encode(ExternalEncoding), input_name='name'), ))
def exportChildren(self, outfile, level, namespace_='ovf:', name_='NetworkType', fromsubclass_=False):
if self.Description is not None:
self.Description.export(outfile, level, namespace_, name_='Description')
def hasContent_(self):
if (
self.Description is not None
):
return True
else:
return False
def exportLiteral(self, outfile, level, name_='NetworkType'):
level += 1
self.exportLiteralAttributes(outfile, level, [], name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.name is not None and 'name' not in already_processed:
already_processed.append('name')
showIndent(outfile, level)
outfile.write('name = "%s",\n' % (self.name,))
for name, value in self.anyAttributes_.items():
showIndent(outfile, level)
outfile.write('%s = "%s",\n' % (name, value,))
def exportLiteralChildren(self, outfile, level, name_):
if self.Description is not None:
showIndent(outfile, level)
outfile.write('Description=model_.Msg_Type(\n')
self.Description.exportLiteral(outfile, level, name_='Description')
showIndent(outfile, level)
outfile.write('),\n')
def build(self, node):
self.buildAttributes(node, node.attrib, [])
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('name', node)
if value is not None and 'name' not in already_processed:
already_processed.append('name')
self.name = value
self.anyAttributes_ = {}
for name, value in attrs.items():
if name not in already_processed:
self.anyAttributes_[name] = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'Description':
obj_ = Msg_Type.factory()
obj_.build(child_)
self.set_Description(obj_)
# end class NetworkType
class ItemType(GeneratedsSuper):
    """Generated binding for a startup-section Item entry.

    Attribute documentation, aggregated from the source schema:
    - id: unique identifier of the content (within a
      VirtualSystemCollection).
    - order: startup order.  Entities are started up starting with lower
      numbers first, starting from 0.  Items with the same order identifier
      may be started up concurrently or in any order.  The order is
      reversed for shutdown.
    - startDelay: delay in seconds to wait for power on to complete.
    - waitingForGuest: resumes power-on sequence if guest software
      reports ok.
    - stopDelay: delay in seconds to wait for power off to complete.
    - startAction: start action to use; valid values are: 'powerOn',
      'none'.
    - stopAction: stop action to use; valid values are: 'powerOff',
      'guestShutdown', 'none'.
    """
    subclass = None
    superclass = None
    def __init__(self, stopDelay=0, order=None, startAction='powerOn', startDelay=0, waitingForGuest=False, stopAction='powerOff', id=None):
        self.stopDelay = _cast(int, stopDelay)
        self.order = _cast(int, order)
        self.startAction = _cast(None, startAction)
        self.startDelay = _cast(int, startDelay)
        self.waitingForGuest = _cast(bool, waitingForGuest)
        self.stopAction = _cast(None, stopAction)
        self.id = _cast(None, id)
        # Catch-all for XML attributes not explicitly modelled above.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if ItemType.subclass:
            return ItemType.subclass(*args_, **kwargs_)
        else:
            return ItemType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_stopDelay(self): return self.stopDelay
    def set_stopDelay(self, stopDelay): self.stopDelay = stopDelay
    def get_order(self): return self.order
    def set_order(self, order): self.order = order
    def get_startAction(self): return self.startAction
    def set_startAction(self, startAction): self.startAction = startAction
    def get_startDelay(self): return self.startDelay
    def set_startDelay(self, startDelay): self.startDelay = startDelay
    def get_waitingForGuest(self): return self.waitingForGuest
    def set_waitingForGuest(self, waitingForGuest): self.waitingForGuest = waitingForGuest
    def get_stopAction(self): return self.stopAction
    def set_stopAction(self, stopAction): self.stopAction = stopAction
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='ItemType', namespacedef_=''):
        # Serialize as XML.  Since hasContent_() is always False (see below),
        # this always produces a self-closing, attributes-only element.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ItemType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ItemType'):
        unique_counter = 0
        # Emit pass-through attributes first, normalizing namespaced names.
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # XMLSchema-instance attributes keep the conventional 'xsi' prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            # The xml: namespace needs no inline declaration.
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Other namespaces get a synthetic inline prefix 'yyyN'.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.stopDelay is not None and 'stopDelay' not in already_processed:
            already_processed.append('stopDelay')
            outfile.write(' stopDelay="%s"' % self.gds_format_integer(self.stopDelay, input_name='stopDelay'))
        if self.order is not None and 'order' not in already_processed:
            already_processed.append('order')
            outfile.write(' order="%s"' % self.gds_format_integer(self.order, input_name='order'))
        if self.startAction is not None and 'startAction' not in already_processed:
            already_processed.append('startAction')
            outfile.write(' startAction=%s' % (self.gds_format_string(quote_attrib(self.startAction).encode(ExternalEncoding), input_name='startAction'), ))
        if self.startDelay is not None and 'startDelay' not in already_processed:
            already_processed.append('startDelay')
            outfile.write(' startDelay="%s"' % self.gds_format_integer(self.startDelay, input_name='startDelay'))
        if self.waitingForGuest is not None and 'waitingForGuest' not in already_processed:
            already_processed.append('waitingForGuest')
            outfile.write(' waitingForGuest="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.waitingForGuest)), input_name='waitingForGuest'))
        if self.stopAction is not None and 'stopAction' not in already_processed:
            already_processed.append('stopAction')
            outfile.write(' stopAction=%s' % (self.gds_format_string(quote_attrib(self.stopAction).encode(ExternalEncoding), input_name='stopAction'), ))
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='ItemType', fromsubclass_=False):
        # ItemType carries no child elements.
        pass
    def hasContent_(self):
        # Generated condition is an empty tuple, which is always falsy:
        # an ItemType never has element content.
        if (
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ItemType'):
        # Emit this object as a Python literal (constructor-call style).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.stopDelay is not None and 'stopDelay' not in already_processed:
            already_processed.append('stopDelay')
            showIndent(outfile, level)
            outfile.write('stopDelay = %d,\n' % (self.stopDelay,))
        if self.order is not None and 'order' not in already_processed:
            already_processed.append('order')
            showIndent(outfile, level)
            outfile.write('order = %d,\n' % (self.order,))
        if self.startAction is not None and 'startAction' not in already_processed:
            already_processed.append('startAction')
            showIndent(outfile, level)
            outfile.write('startAction = "%s",\n' % (self.startAction,))
        if self.startDelay is not None and 'startDelay' not in already_processed:
            already_processed.append('startDelay')
            showIndent(outfile, level)
            outfile.write('startDelay = %d,\n' % (self.startDelay,))
        if self.waitingForGuest is not None and 'waitingForGuest' not in already_processed:
            already_processed.append('waitingForGuest')
            showIndent(outfile, level)
            outfile.write('waitingForGuest = %s,\n' % (self.waitingForGuest,))
        if self.stopAction is not None and 'stopAction' not in already_processed:
            already_processed.append('stopAction')
            showIndent(outfile, level)
            outfile.write('stopAction = "%s",\n' % (self.stopAction,))
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            showIndent(outfile, level)
            outfile.write('id = "%s",\n' % (self.id,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        # ItemType carries no child elements.
        pass
    def build(self, node):
        # Populate this object from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Integer attributes raise a parse error on non-numeric input;
        # boolean ones accept only 'true'/'1'/'false'/'0'.
        value = find_attr_value_('stopDelay', node)
        if value is not None and 'stopDelay' not in already_processed:
            already_processed.append('stopDelay')
            try:
                self.stopDelay = int(value)
            except ValueError, exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('order', node)
        if value is not None and 'order' not in already_processed:
            already_processed.append('order')
            try:
                self.order = int(value)
            except ValueError, exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('startAction', node)
        if value is not None and 'startAction' not in already_processed:
            already_processed.append('startAction')
            self.startAction = value
        value = find_attr_value_('startDelay', node)
        if value is not None and 'startDelay' not in already_processed:
            already_processed.append('startDelay')
            try:
                self.startDelay = int(value)
            except ValueError, exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('waitingForGuest', node)
        if value is not None and 'waitingForGuest' not in already_processed:
            already_processed.append('waitingForGuest')
            if value in ('true', '1'):
                self.waitingForGuest = True
            elif value in ('false', '0'):
                self.waitingForGuest = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('stopAction', node)
        if value is not None and 'stopAction' not in already_processed:
            already_processed.append('stopAction')
            self.stopAction = value
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.append('id')
            self.id = value
        # Any attribute not consumed above is preserved verbatim.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # ItemType carries no child elements.
        pass
# end class ItemType
class ConfigurationType(GeneratedsSuper):
    """Generated binding for a ConfigurationType element.

    Holds a boolean 'default' attribute, a string 'id' attribute, optional
    Label and Description children (built as Msg_Type), and a catch-all
    dict (anyAttributes_) for unmodelled XML attributes.
    """
    subclass = None
    superclass = None
    def __init__(self, default=False, id=None, Label=None, Description=None):
        self.default = _cast(bool, default)
        self.id = _cast(None, id)
        self.Label = Label
        self.Description = Description
        # Catch-all for XML attributes not explicitly modelled above.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if ConfigurationType.subclass:
            return ConfigurationType.subclass(*args_, **kwargs_)
        else:
            return ConfigurationType(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_Label(self): return self.Label
    def set_Label(self, Label): self.Label = Label
    def get_Description(self): return self.Description
    def set_Description(self, Description): self.Description = Description
    def get_default(self): return self.default
    def set_default(self, default): self.default = default
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='ConfigurationType', namespacedef_=''):
        # Serialize this object as an XML element; self-closing when empty.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='ConfigurationType')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='ConfigurationType'):
        unique_counter = 0
        # Emit pass-through attributes first, normalizing namespaced names.
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # XMLSchema-instance attributes keep the conventional 'xsi' prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            # The xml: namespace needs no inline declaration.
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Other namespaces get a synthetic inline prefix 'yyyN'.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        if self.default is not None and 'default' not in already_processed:
            already_processed.append('default')
            outfile.write(' default="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.default)), input_name='default'))
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            outfile.write(' id=%s' % (self.gds_format_string(quote_attrib(self.id).encode(ExternalEncoding), input_name='id'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='ConfigurationType', fromsubclass_=False):
        if self.Label is not None:
            self.Label.export(outfile, level, namespace_, name_='Label', )
        if self.Description is not None:
            self.Description.export(outfile, level, namespace_, name_='Description', )
    def hasContent_(self):
        # True when there is at least one child element to serialize.
        if (
            self.Label is not None or
            self.Description is not None
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='ConfigurationType'):
        # Emit this object as a Python literal (constructor-call style).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.default is not None and 'default' not in already_processed:
            already_processed.append('default')
            showIndent(outfile, level)
            outfile.write('default = %s,\n' % (self.default,))
        if self.id is not None and 'id' not in already_processed:
            already_processed.append('id')
            showIndent(outfile, level)
            outfile.write('id = "%s",\n' % (self.id,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Label is not None:
            showIndent(outfile, level)
            outfile.write('Label=model_.Msg_Type(\n')
            self.Label.exportLiteral(outfile, level, name_='Label')
            showIndent(outfile, level)
            outfile.write('),\n')
        if self.Description is not None:
            showIndent(outfile, level)
            outfile.write('Description=model_.Msg_Type(\n')
            self.Description.exportLiteral(outfile, level, name_='Description')
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node):
        # Populate this object from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # 'default' accepts only 'true'/'1'/'false'/'0'; anything else is
        # reported as a parse error.
        value = find_attr_value_('default', node)
        if value is not None and 'default' not in already_processed:
            already_processed.append('default')
            if value in ('true', '1'):
                self.default = True
            elif value in ('false', '0'):
                self.default = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.append('id')
            self.id = value
        # Any attribute not consumed above is preserved verbatim.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'Label':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_Label(obj_)
        elif nodeName_ == 'Description':
            obj_ = Msg_Type.factory()
            obj_.build(child_)
            self.set_Description(obj_)
# end class ConfigurationType
class RASD_Type(CIM_ResourceAllocationSettingData_Type):
    """Wrapper for CIM_ResourceAllocationSettingData_Type.

    Adds three OVF attributes on top of the CIM base class (documentation
    aggregated from the source schema):
    - required: determines whether import should fail if entry is not
      understood.
    - configuration: configuration from DeploymentOptionSection this
      entry is valid for.
    - bound: states that this entry is a range marker.

    All element content is delegated to the superclass.
    """
    subclass = None
    superclass = CIM_ResourceAllocationSettingData_Type
    def __init__(self, Address=None, AddressOnParent=None, AllocationUnits=None, AutomaticAllocation=None, AutomaticDeallocation=None, Caption=None, Connection=None, ConsumerVisibility=None, Description=None, ElementName=None, HostResource=None, InstanceID=None, Limit=None, MappingBehavior=None, OtherResourceType=None, Parent=None, PoolID=None, Reservation=None, ResourceSubType=None, ResourceType=None, VirtualQuantity=None, VirtualQuantityUnits=None, Weight=None, anytypeobjs_=None, required=True, bound=None, configuration=None):
        super(RASD_Type, self).__init__(Address, AddressOnParent, AllocationUnits, AutomaticAllocation, AutomaticDeallocation, Caption, Connection, ConsumerVisibility, Description, ElementName, HostResource, InstanceID, Limit, MappingBehavior, OtherResourceType, Parent, PoolID, Reservation, ResourceSubType, ResourceType, VirtualQuantity, VirtualQuantityUnits, Weight, anytypeobjs_, )
        self.required = _cast(bool, required)
        self.bound = _cast(None, bound)
        self.configuration = _cast(None, configuration)
        # Catch-all for XML attributes not explicitly modelled above.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if RASD_Type.subclass:
            return RASD_Type.subclass(*args_, **kwargs_)
        else:
            return RASD_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_required(self): return self.required
    def set_required(self, required): self.required = required
    def get_bound(self): return self.bound
    def set_bound(self, bound): self.bound = bound
    def get_configuration(self): return self.configuration
    def set_configuration(self, configuration): self.configuration = configuration
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='RASD_Type', namespacedef_=''):
        # Serialize this object as an XML element; self-closing when empty.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='RASD_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='RASD_Type'):
        unique_counter = 0
        # Emit pass-through attributes first, normalizing namespaced names.
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # XMLSchema-instance attributes keep the conventional 'xsi' prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            # The xml: namespace needs no inline declaration.
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Other namespaces get a synthetic inline prefix 'yyyN'.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        # Superclass attributes are written before this class's own.
        super(RASD_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='RASD_Type')
        if self.required is not None and 'required' not in already_processed:
            already_processed.append('required')
            outfile.write(' required="%s"' % self.gds_format_boolean(self.gds_str_lower(str(self.required)), input_name='required'))
        if self.bound is not None and 'bound' not in already_processed:
            already_processed.append('bound')
            outfile.write(' bound=%s' % (self.gds_format_string(quote_attrib(self.bound).encode(ExternalEncoding), input_name='bound'), ))
        if self.configuration is not None and 'configuration' not in already_processed:
            already_processed.append('configuration')
            outfile.write(' configuration=%s' % (self.gds_format_string(quote_attrib(self.configuration).encode(ExternalEncoding), input_name='configuration'), ))
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='RASD_Type', fromsubclass_=False):
        # All element content comes from the superclass.
        super(RASD_Type, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        if (
            super(RASD_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='RASD_Type'):
        # Emit this object as a Python literal (constructor-call style).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        if self.required is not None and 'required' not in already_processed:
            already_processed.append('required')
            showIndent(outfile, level)
            outfile.write('required = %s,\n' % (self.required,))
        if self.bound is not None and 'bound' not in already_processed:
            already_processed.append('bound')
            showIndent(outfile, level)
            outfile.write('bound = "%s",\n' % (self.bound,))
        if self.configuration is not None and 'configuration' not in already_processed:
            already_processed.append('configuration')
            showIndent(outfile, level)
            outfile.write('configuration = "%s",\n' % (self.configuration,))
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
        super(RASD_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(RASD_Type, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        # Populate this object from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # 'required' accepts only 'true'/'1'/'false'/'0'.
        value = find_attr_value_('required', node)
        if value is not None and 'required' not in already_processed:
            already_processed.append('required')
            if value in ('true', '1'):
                self.required = True
            elif value in ('false', '0'):
                self.required = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('bound', node)
        if value is not None and 'bound' not in already_processed:
            already_processed.append('bound')
            self.bound = value
        value = find_attr_value_('configuration', node)
        if value is not None and 'configuration' not in already_processed:
            already_processed.append('configuration')
            self.configuration = value
        # Any attribute not consumed above is preserved verbatim.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        super(RASD_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # All child handling is delegated to the superclass.
        super(RASD_Type, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class RASD_Type
class VSSD_Type(CIM_VirtualSystemSettingData_Type):
    """Wrapper for CIM_VirtualSystemSettingData_Type.

    Adds no fields of its own beyond the catch-all anyAttributes_ dict;
    attributes, element content, and parsing are delegated to the CIM
    superclass.
    """
    subclass = None
    superclass = CIM_VirtualSystemSettingData_Type
    def __init__(self, AutomaticRecoveryAction=None, AutomaticShutdownAction=None, AutomaticStartupAction=None, AutomaticStartupActionDelay=None, AutomaticStartupActionSequenceNumber=None, Caption=None, ConfigurationDataRoot=None, ConfigurationFile=None, ConfigurationID=None, CreationTime=None, Description=None, ElementName=None, InstanceID=None, LogDataRoot=None, Notes=None, RecoveryFile=None, SnapshotDataRoot=None, SuspendDataRoot=None, SwapFileDataRoot=None, VirtualSystemIdentifier=None, VirtualSystemType=None, anytypeobjs_=None):
        super(VSSD_Type, self).__init__(AutomaticRecoveryAction, AutomaticShutdownAction, AutomaticStartupAction, AutomaticStartupActionDelay, AutomaticStartupActionSequenceNumber, Caption, ConfigurationDataRoot, ConfigurationFile, ConfigurationID, CreationTime, Description, ElementName, InstanceID, LogDataRoot, Notes, RecoveryFile, SnapshotDataRoot, SuspendDataRoot, SwapFileDataRoot, VirtualSystemIdentifier, VirtualSystemType, anytypeobjs_, )
        # Catch-all for XML attributes not explicitly modelled.
        self.anyAttributes_ = {}
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one has been installed.
        if VSSD_Type.subclass:
            return VSSD_Type.subclass(*args_, **kwargs_)
        else:
            return VSSD_Type(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_anyAttributes_(self): return self.anyAttributes_
    def set_anyAttributes_(self, anyAttributes_): self.anyAttributes_ = anyAttributes_
    def export(self, outfile, level, namespace_='ovf:', name_='VSSD_Type', namespacedef_=''):
        # Serialize this object as an XML element; self-closing when empty.
        showIndent(outfile, level)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = []
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='VSSD_Type')
        if self.hasContent_():
            outfile.write('>\n')
            self.exportChildren(outfile, level + 1, namespace_, name_)
            showIndent(outfile, level)
            outfile.write('</%s%s>\n' % (namespace_, name_))
        else:
            outfile.write('/>\n')
    def exportAttributes(self, outfile, level, already_processed, namespace_='ovf:', name_='VSSD_Type'):
        unique_counter = 0
        # Emit pass-through attributes first, normalizing namespaced names.
        for name, value in self.anyAttributes_.items():
            xsinamespaceprefix = 'xsi'
            xsinamespace1 = 'http://www.w3.org/2001/XMLSchema-instance'
            xsinamespace2 = '{%s}' % (xsinamespace1, )
            if name.startswith(xsinamespace2):
                # XMLSchema-instance attributes keep the conventional 'xsi' prefix.
                name1 = name[len(xsinamespace2):]
                name2 = '%s:%s' % (xsinamespaceprefix, name1, )
                if name2 not in already_processed:
                    already_processed.append(name2)
                    outfile.write(' %s=%s' % (name2, quote_attrib(value), ))
            else:
                mo = re_.match(Namespace_extract_pat_, name)
                if mo is not None:
                    namespace, name = mo.group(1, 2)
                    if name not in already_processed:
                        already_processed.append(name)
                        if namespace == 'http://www.w3.org/XML/1998/namespace':
                            # The xml: namespace needs no inline declaration.
                            outfile.write(' %s=%s' % (name, quote_attrib(value), ))
                        else:
                            # Other namespaces get a synthetic inline prefix 'yyyN'.
                            unique_counter += 1
                            outfile.write(' xmlns:yyy%d="%s"' % (unique_counter, namespace, ))
                            outfile.write(' yyy%d:%s=%s' % (unique_counter, name, quote_attrib(value), ))
                else:
                    if name not in already_processed:
                        already_processed.append(name)
                        outfile.write(' %s=%s' % (name, quote_attrib(value), ))
        super(VSSD_Type, self).exportAttributes(outfile, level, already_processed, namespace_, name_='VSSD_Type')
    def exportChildren(self, outfile, level, namespace_='ovf:', name_='VSSD_Type', fromsubclass_=False):
        # All element content comes from the superclass.
        super(VSSD_Type, self).exportChildren(outfile, level, namespace_, name_, True)
    def hasContent_(self):
        if (
            super(VSSD_Type, self).hasContent_()
            ):
            return True
        else:
            return False
    def exportLiteral(self, outfile, level, name_='VSSD_Type'):
        # Emit this object as a Python literal (constructor-call style).
        level += 1
        self.exportLiteralAttributes(outfile, level, [], name_)
        if self.hasContent_():
            self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, already_processed, name_):
        for name, value in self.anyAttributes_.items():
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (name, value,))
        super(VSSD_Type, self).exportLiteralAttributes(outfile, level, already_processed, name_)
    def exportLiteralChildren(self, outfile, level, name_):
        super(VSSD_Type, self).exportLiteralChildren(outfile, level, name_)
    def build(self, node):
        # Populate this object from an ElementTree node.
        self.buildAttributes(node, node.attrib, [])
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
    def buildAttributes(self, node, attrs, already_processed):
        # Preserve unmodelled attributes, then delegate to the superclass.
        self.anyAttributes_ = {}
        for name, value in attrs.items():
            if name not in already_processed:
                self.anyAttributes_[name] = value
        super(VSSD_Type, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # All child handling is delegated to the superclass.
        super(VSSD_Type, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class VSSD_Type
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
"""

def usage():
    """Print the usage message to stdout and terminate with exit status 1."""
    # sys.stdout.write behaves identically under Python 2 and 3; the
    # previous bare `print` statement was Python 2-only syntax.
    sys.stdout.write(USAGE_TEXT + '\n')
    sys.exit(1)
def get_root_tag(node):
    """Map an ElementTree node to its local tag name and the binding class
    of the same name from this module (None when no such class exists)."""
    local_tag = Tag_pattern_.match(node.tag).groups()[-1]
    binding_class = globals().get(local_tag)
    return local_tag, binding_class
def parse(inFileName):
    """Parse the XML document at ``inFileName`` and return the built
    binding object for its root element."""
    tree = parsexml_(inFileName)
    root_node = tree.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        # Unrecognized root element: fall back to the Envelope binding.
        root_tag = 'Envelope'
        root_class = EnvelopeType
    root_obj = root_class.factory()
    root_obj.build(root_node)
    # Drop the DOM reference so its memory can be reclaimed.
    tree = None
    return root_obj
def parseString(inString):
    """Parse an XML document held in a string and return the built root
    object.  Unlike parse(), this also echoes the document back to stdout
    as XML, exporting the root under the name 'Envelope'.
    """
    # Python 2 StringIO module -- NOTE(review): under Python 3 this import
    # fails (io.StringIO); confirm the intended interpreter version.
    from StringIO import StringIO
    doc = parsexml_(StringIO(inString))
    rootNode = doc.getroot()
    rootTag, rootClass = get_root_tag(rootNode)
    if rootClass is None:
        # Unrecognized root element: fall back to the Envelope binding.
        rootTag = 'Envelope'
        rootClass = EnvelopeType
    rootObj = rootClass.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    sys.stdout.write('<?xml version="1.0" ?>\n')
    rootObj.export(sys.stdout, 0, name_="Envelope",
        namespacedef_='')
    return rootObj
def parseLiteral(inFileName):
    """Parse the XML document at ``inFileName``, write it to stdout as a
    Python-literal reconstruction script, and return the root object."""
    tree = parsexml_(inFileName)
    root_node = tree.getroot()
    root_tag, root_class = get_root_tag(root_node)
    if root_class is None:
        # Unrecognized root element: fall back to the Envelope binding.
        root_tag = 'Envelope'
        root_class = EnvelopeType
    root_obj = root_class.factory()
    root_obj.build(root_node)
    # Drop the DOM reference so its memory can be reclaimed.
    tree = None
    sys.stdout.write('#from ovfenvelope import *\n\n')
    sys.stdout.write('import ovfenvelope as model_\n\n')
    sys.stdout.write('rootObj = model_.rootTag(\n')
    root_obj.exportLiteral(sys.stdout, 0, name_=root_tag)
    sys.stdout.write(')\n')
    return root_obj
def main():
    """CLI entry point: parse the single input file or show usage and exit."""
    args = sys.argv[1:]
    if len(args) != 1:
        # usage() calls sys.exit(1), so control never reaches parse() below.
        usage()
    parse(args[0])
# Run as a script: parse the file named on the command line.
if __name__ == '__main__':
    #import pdb; pdb.set_trace()
    main()
# Public names exported by this generated module: one class per OVF schema
# type, plus the CIM simple-type and qualifier wrappers.
__all__ = [
    "AnnotationSection_Type",
    "CIM_ResourceAllocationSettingData_Type",
    "CIM_VirtualSystemSettingData_Type",
    "Caption",
    "ConfigurationType",
    "Content_Type",
    "DeploymentOptionSection_Type",
    "DiskSection_Type",
    "EnvelopeType",
    "EulaSection_Type",
    "File_Type",
    "IconType",
    "InstallSection_Type",
    "ItemType",
    "MsgType",
    "Msg_Type",
    "NetworkSection_Type",
    "NetworkType",
    "OperatingSystemSection_Type",
    "ProductSection_Type",
    "PropertyConfigurationValue_Type",
    "PropertyType",
    "RASD_Type",
    "References_Type",
    "ResourceAllocationSection_Type",
    "Section_Type",
    "StartupSection_Type",
    "Strings_Type",
    "VSSD_Type",
    "VirtualDiskDesc_Type",
    "VirtualHardwareSection_Type",
    "VirtualSystemCollection_Type",
    "VirtualSystem_Type",
    "cimAnySimpleType",
    "cimBase64Binary",
    "cimBoolean",
    "cimByte",
    "cimChar16",
    "cimDateTime",
    "cimDouble",
    "cimFloat",
    "cimHexBinary",
    "cimInt",
    "cimLong",
    "cimReference",
    "cimShort",
    "cimString",
    "cimUnsignedByte",
    "cimUnsignedInt",
    "cimUnsignedLong",
    "cimUnsignedShort",
    "qualifierBoolean",
    "qualifierSArray",
    "qualifierSInt64",
    "qualifierString",
    "qualifierUInt32"
    ]
|
Dhandapani/gluster-ovirt
|
backend/manager/tools/engine-image-uploader/src/ovf/ovfenvelope.py
|
Python
|
apache-2.0
| 398,478
|
package krasa.mavenhelper.model;
/**
 * A text-substitution rule applied to a Maven command line: every occurrence
 * of {@code from} is replaced with {@code to}.
 */
public class Alias extends DomainObject {

	private String from;
	private String to;

	public Alias() {
	}

	public Alias(String from, String to) {
		this.from = from;
		this.to = to;
	}

	/**
	 * Static factory for an alias. Parameter names now mirror the
	 * {@code from}/{@code to} fields they populate (previously the
	 * misleading {@code name}/{@code value}).
	 */
	public static Alias of(String from, String to) {
		return new Alias(from, to);
	}

	public String getFrom() {
		return from;
	}

	public void setFrom(String from) {
		this.from = from;
	}

	public String getTo() {
		return to;
	}

	public void setTo(String to) {
		this.to = to;
	}

	/**
	 * Applies this alias to the given command line.
	 *
	 * @param commandLine the command line to transform
	 * @return the command line with every occurrence of {@code from}
	 *         replaced by {@code to}; returned unchanged when either side
	 *         of the alias is {@code null}
	 */
	public String applyTo(String commandLine) {
		if (from != null && to != null) {
			return commandLine.replace(from, to);
		}
		return commandLine;
	}
}
|
fengyie007/MavenHelper
|
src/main/java/krasa/mavenhelper/model/Alias.java
|
Java
|
apache-2.0
| 665
|
# Arecacicola calami Joanne E. Taylor, J. Fröhl. & K.D. Hyde, 2001 SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Mycoscience 42(4): 370 (2001)
#### Original name
Arecacicola calami Joanne E. Taylor, J. Fröhl. & K.D. Hyde, 2001
#### Remarks
null
|
mdoering/backbone
|
life/Fungi/Ascomycota/Sordariomycetes/Arecacicola/Arecacicola calami/README.md
|
Markdown
|
apache-2.0
| 309
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.collection.primitive;
/**
 * A visitor over distinct {@code int} values (e.g. the members of a primitive
 * int set). Annotated {@code @FunctionalInterface} so implementations can be
 * supplied as lambdas or method references; this is purely additive and does
 * not change the interface's contract.
 *
 * @param <E> the checked exception type the visitor is permitted to throw
 */
@FunctionalInterface
public interface PrimitiveIntVisitor<E extends Exception>
{
    /**
     * Visit the given entry.
     *
     * @param value A distinct value from the set.
     * @return 'true' to signal that the iteration should be stopped, 'false' to signal that the iteration should
     * continue if there are more entries to look at.
     * @throws E any thrown exception of type 'E' will bubble up through the 'visit' method.
     */
    boolean visited( int value ) throws E;
}
|
HuangLS/neo4j
|
community/primitive-collections/src/main/java/org/neo4j/collection/primitive/PrimitiveIntVisitor.java
|
Java
|
apache-2.0
| 1,307
|
---
version: 1.0.0
title: Параллелизм
---
Одна из сильных сторон Elixir — поддержка параллелизма. Благодаря Erlang VM (BEAM) параллелизм в Elixir легче, чем вы думали. В основе модели параллелизма лежат акторы — процессы, взаимодействующие с другими процессами путём передачи сообщений.
В этом уроке мы познакомимся с модулями параллелизма, поставляемыми вместе с Elixir. В следующей части мы также узнаем, каким способом они реализованы в OTP.
{% include toc.html %}
## Процессы
Процессы в Erlang VM легковесны и выполняются на всех процессорах. Они могут показаться похожими на нативные потоки, но они проще, и вполне нормально иметь тысячи параллельных процессов в одном приложении Elixir.
Простейший способ создать новый процесс — это функция `spawn`, принимающая анонимную или именованную функцию. Когда мы создаём новый процесс, он возвращает _Идентификатор процесса_, или PID, для однозначного определения внутри нашего приложения.
Для начала создадим модуль и опишем функцию, которую мы хотели бы запустить:
```elixir
defmodule Example do
def add(a, b) do
IO.puts(a + b)
end
end
iex> Example.add(2, 3)
5
:ok
```
Чтобы выполнить функцию асинхронно, воспользуемся `spawn/3`:
```elixir
iex> spawn(Example, :add, [2, 3])
5
#PID<0.80.0>
```
### Передача сообщений
Для взаимодействия между собой процессы используют сообщения. Для этого существует две части: `send/2` и `receive`. Функция `send/2` позволяет отправлять сообщения PID'y. Для получения и проверки сообщений используется `receive`. Если при проверке совпадение не будет найдено, выполнение продолжится.
```elixir
defmodule Example do
def listen do
receive do
{:ok, "hello"} -> IO.puts "World"
end
listen
end
end
iex> pid = spawn(Example, :listen, [])
#PID<0.108.0>
iex> send pid, {:ok, "hello"}
World
{:ok, "hello"}
iex> send pid, :ok
:ok
```
Стоит заметить, что функция `listen/0` рекурсивна (вызывает саму себя), что позволяет этому процессу обработать несколько сообщений. Без этого вызова процесс завершит свою работу после обработки первого сообщения.
### Связывание процессов
Одна из проблем при использовании `spawn` — узнать о выходе процесса из строя. Для этого мы свяжем наши процессы с помощью `spawn_link`. Два связанных процесса будут получать друг от друга уведомления о завершении:
```elixir
defmodule Example do
def explode, do: exit(:kaboom)
end
iex> spawn(Example, :explode, [])
#PID<0.66.0>
iex> spawn_link(Example, :explode, [])
** (EXIT from #PID<0.57.0>) evaluator process exited with reason: :kaboom
```
Иногда мы не хотим, чтобы связанный процесс завершал текущий. Для этого нужно перехватывать попытки завершения. Перехваченные попытки будут получены в виде сообщения-кортежа: `{:EXIT, from_pid, reason}`.
```elixir
defmodule Example do
def explode, do: exit(:kaboom)
def run do
Process.flag(:trap_exit, true)
spawn_link(Example, :explode, [])
receive do
{:EXIT, from_pid, reason} -> IO.puts "Exit reason: #{reason}"
end
end
end
iex> Example.run
Exit reason: kaboom
:ok
```
### Мониторинг процессов
Но что делать, если мы не хотим связывать два процесса, но при этом хотим получать информацию? Можно воспользоваться `spawn_monitor` для мониторинга процесса. При наблюдении за процессом мы получаем сообщения, если процесс выйдет из строя, без завершения текущего процесса и необходимости явно перехватывать попытки завершения.
```elixir
defmodule Example do
def explode, do: exit(:kaboom)
def run do
{pid, ref} = spawn_monitor(Example, :explode, [])
receive do
{:DOWN, ref, :process, from_pid, reason} -> IO.puts "Exit reason: #{reason}"
end
end
end
iex> Example.run
Exit reason: kaboom
:ok
```
## Агенты
Агенты — абстракция над фоновыми процессами, сохраняющими состояние. Мы можем получить доступ к ним из другого процесса нашего приложения. Состояние агента устанавливается равным возвращаемому значению нашей функции:
```elixir
iex> {:ok, agent} = Agent.start_link(fn -> [1, 2, 3] end)
{:ok, #PID<0.65.0>}
iex> Agent.update(agent, fn (state) -> state ++ [4, 5] end)
:ok
iex> Agent.get(agent, &(&1))
[1, 2, 3, 4, 5]
```
Если мы зададим имя агенту, то сможем обращаться к нему, используя имя, а не PID:
```elixir
iex> Agent.start_link(fn -> [1, 2, 3] end, name: Numbers)
{:ok, #PID<0.74.0>}
iex> Agent.get(Numbers, &(&1))
[1, 2, 3]
```
## Задачи
Задачи предоставляют возможность выполнять функцию в фоновом режиме и получать её значение потом. Они могут быть особенно полезны при обработке дорогостоящих операций без блокировки выполнения приложения.
```elixir
defmodule Example do
def double(x) do
:timer.sleep(2000)
x * 2
end
end
iex> task = Task.async(Example, :double, [2000])
%Task{pid: #PID<0.111.0>, ref: #Reference<0.0.8.200>}
# Делаем что-нибудь
iex> Task.await(task)
4000
```
|
erickgnavar/elixir-school
|
ru/lessons/advanced/concurrency.md
|
Markdown
|
apache-2.0
| 7,105
|
/* First created by JCasGen Sat Jul 21 15:40:55 EDT 2012 */
package edu.cmu.lti.oaqa.framework.types;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.JCasRegistry;
import org.apache.uima.cas.impl.CASImpl;
import org.apache.uima.cas.impl.FSGenerator;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.impl.TypeImpl;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.impl.FeatureImpl;
import org.apache.uima.cas.Feature;
import org.apache.uima.jcas.tcas.Annotation_Type;
/**
* Updated by JCasGen Fri Jan 18 19:04:09 EST 2013
* @generated */
// Generated by JCasGen (see @generated tags); regenerate from the UIMA
// type-system descriptor rather than hand-editing this adapter class.
public class ProcessingStep_Type extends Annotation_Type {
  /** @generated */
  protected FSGenerator getFSGenerator() {return fsGenerator;}
  /** @generated */
  // Maps a CAS address to a ProcessingStep Java object, reusing a cached
  // instance when useExistingInstance is set on this type adapter.
  private final FSGenerator fsGenerator =
    new FSGenerator() {
      public FeatureStructure createFS(int addr, CASImpl cas) {
        if (ProcessingStep_Type.this.useExistingInstance) {
          // Return eq fs instance if already created
          FeatureStructure fs = ProcessingStep_Type.this.jcas.getJfsFromCaddr(addr);
          if (null == fs) {
            fs = new ProcessingStep(addr, ProcessingStep_Type.this);
            ProcessingStep_Type.this.jcas.putJfsFromCaddr(addr, fs);
            return fs;
          }
          return fs;
        } else return new ProcessingStep(addr, ProcessingStep_Type.this);
      }
    };
  /** @generated */
  public final static int typeIndexID = ProcessingStep.typeIndexID;
  /** @generated
     @modifiable */
  public final static boolean featOkTst = JCasRegistry.getFeatOkTst("edu.cmu.lti.oaqa.framework.types.ProcessingStep");

  // Feature: phaseId (uima.cas.Integer)
  /** @generated */
  final Feature casFeat_phaseId;
  /** @generated */
  final int casFeatCode_phaseId;
  /** @generated */
  public int getPhaseId(int addr) {
        if (featOkTst && casFeat_phaseId == null)
      jcas.throwFeatMissing("phaseId", "edu.cmu.lti.oaqa.framework.types.ProcessingStep");
    return ll_cas.ll_getIntValue(addr, casFeatCode_phaseId);
  }
  /** @generated */
  public void setPhaseId(int addr, int v) {
        if (featOkTst && casFeat_phaseId == null)
      jcas.throwFeatMissing("phaseId", "edu.cmu.lti.oaqa.framework.types.ProcessingStep");
    ll_cas.ll_setIntValue(addr, casFeatCode_phaseId, v);}

  // Feature: component (uima.cas.String)
  /** @generated */
  final Feature casFeat_component;
  /** @generated */
  final int casFeatCode_component;
  /** @generated */
  public String getComponent(int addr) {
        if (featOkTst && casFeat_component == null)
      jcas.throwFeatMissing("component", "edu.cmu.lti.oaqa.framework.types.ProcessingStep");
    return ll_cas.ll_getStringValue(addr, casFeatCode_component);
  }
  /** @generated */
  public void setComponent(int addr, String v) {
        if (featOkTst && casFeat_component == null)
      jcas.throwFeatMissing("component", "edu.cmu.lti.oaqa.framework.types.ProcessingStep");
    ll_cas.ll_setStringValue(addr, casFeatCode_component, v);}

  // Feature: casId (uima.cas.String)
  /** @generated */
  final Feature casFeat_casId;
  /** @generated */
  final int casFeatCode_casId;
  /** @generated */
  public String getCasId(int addr) {
        if (featOkTst && casFeat_casId == null)
      jcas.throwFeatMissing("casId", "edu.cmu.lti.oaqa.framework.types.ProcessingStep");
    return ll_cas.ll_getStringValue(addr, casFeatCode_casId);
  }
  /** @generated */
  public void setCasId(int addr, String v) {
        if (featOkTst && casFeat_casId == null)
      jcas.throwFeatMissing("casId", "edu.cmu.lti.oaqa.framework.types.ProcessingStep");
    ll_cas.ll_setStringValue(addr, casFeatCode_casId, v);}

  /** initialize variables to correspond with Cas Type and Features
	* @generated */
  public ProcessingStep_Type(JCas jcas, Type casType) {
    super(jcas, casType);
    casImpl.getFSClassRegistry().addGeneratorForType((TypeImpl)this.casType, getFSGenerator());

    casFeat_phaseId = jcas.getRequiredFeatureDE(casType, "phaseId", "uima.cas.Integer", featOkTst);
    casFeatCode_phaseId  = (null == casFeat_phaseId) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_phaseId).getCode();

    casFeat_component = jcas.getRequiredFeatureDE(casType, "component", "uima.cas.String", featOkTst);
    casFeatCode_component  = (null == casFeat_component) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_component).getCode();

    casFeat_casId = jcas.getRequiredFeatureDE(casType, "casId", "uima.cas.String", featOkTst);
    casFeatCode_casId  = (null == casFeat_casId) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_casId).getCode();
  }
}
|
oaqa/uima-ecd
|
src/main/java/edu/cmu/lti/oaqa/framework/types/ProcessingStep_Type.java
|
Java
|
apache-2.0
| 4,558
|
'use strict';

// Organization resource model: name, location, website, and industry.
const mongodb = require ('@onehilltech/blueprint-mongodb');
const StatPlugin = mongodb.plugins.StatPlugin;

// Every field is required, and surrounding whitespace is trimmed.
const schema = new mongodb.Schema ({
  name: {type: String, required: true, trim: true},
  location: {type: String, required: true, trim: true},
  website: {type: String, required: true, trim: true},
  industry: {type: String, required: true, trim: true}
});

// register stat plugin with schema
schema.plugin (StatPlugin);

const MODEL_NAME = 'organization';
const COLLECTION_NAME = 'organizations';

module.exports = mongodb.resource (MODEL_NAME, schema, COLLECTION_NAME);
|
DannyPeck/hive-apiserver
|
app/models/Organization.js
|
JavaScript
|
apache-2.0
| 610
|
#
# Author:: Jimmy McCrory (<jimmy.mccrory@gmail.com>)
# Copyright:: Copyright (c) 2014 Jimmy McCrory
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
# Unit tests for `ceth node environment set NODE ENVIRONMENT`, which loads a
# node, changes its environment, saves it, and prints the result.
describe Seth::ceth::NodeEnvironmentSet do
  before(:each) do
    # Run under a fixed client identity so node loading is deterministic.
    Seth::Config[:node_name] = "webmonkey.example.com"
    @ceth = Seth::ceth::NodeEnvironmentSet.new
    # Positional arguments: <node name> <environment>.
    @ceth.name_args = [ "adam", "bar" ]
    @ceth.stub(:output).and_return(true)
    # Fake node starting in environment "foo"; save/load are stubbed so no
    # server round-trip happens.
    @node = Seth::Node.new()
    @node.name("cethtest-node")
    @node.seth_environment << "foo"
    @node.stub(:save).and_return(true)
    Seth::Node.stub(:load).and_return(@node)
  end

  describe "run" do
    it "should load the node" do
      Seth::Node.should_receive(:load).with("adam")
      @ceth.run
    end

    it "should update the environment" do
      @ceth.run
      @node.seth_environment.should == 'bar'
    end

    it "should save the node" do
      @node.should_receive(:save)
      @ceth.run
    end

    it "should print the environment" do
      @ceth.should_receive(:output).and_return(true)
      @ceth.run
    end

    # Missing-argument handling: the command must exit with usage + error.
    describe "with no environment" do
      # Set up outputs for inspection later
      before(:each) do
        @stdout = StringIO.new
        @stderr = StringIO.new
        @ceth.ui.stub(:stdout).and_return(@stdout)
        @ceth.ui.stub(:stderr).and_return(@stderr)
      end

      it "should exit" do
        @ceth.name_args = [ "adam" ]
        lambda { @ceth.run }.should raise_error SystemExit
      end

      it "should show the user the usage and an error" do
        @ceth.name_args = [ "adam" ]
        begin ; @ceth.run ; rescue SystemExit ; end
        @stdout.string.should eq "USAGE: ceth node environment set NODE ENVIRONMENT\n"
        @stderr.string.should eq "FATAL: You must specify a node name and an environment.\n"
      end
    end
  end
end
|
danielsdeleo/seth
|
spec/unit/knife/node_environment_set_spec.rb
|
Ruby
|
apache-2.0
| 2,372
|
/*
* Copyright 2016 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.bot.model.event.beacon;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Value;
/**
 * Payload of a beacon webhook event: the hardware ID of the beacon that was
 * seen, the kind of beacon event, and an optional raw device message
 * (the {@code dm} webhook property).
 */
@Value
public class BeaconContent {
    /** Hardware ID of the beacon that was detected. */
    String hwid;

    /** Type of beacon event. */
    String type;

    /**
     * Raw device message bytes; {@code null} when the webhook carried no
     * {@code dm} property.
     *
     * @see #getDeviceMessage()
     */
    byte[] deviceMessage;

    /**
     * Device message of the detected beacon as a byte array, or {@code null}
     * when absent. (Optional.)
     *
     * <p>The "device message" consists of data generated by the beacon to
     * send notifications to bots; it is only included in webhooks from
     * devices that support it.</p>
     *
     * <p>The value follows the LINE Simple Beacon specification.</p>
     *
     * @see #getDeviceMessageAsHex()
     * @see <a href="https://github.com/line/line-simple-beacon/blob/master/README.en.md#line-simple-beacon-frame">LINE Simple Beacon specification (en)</a>
     * @see <a href="https://github.com/line/line-simple-beacon/blob/master/README.ja.md#line-simple-beacon-frame">LINE Simple Beacon specification (ja)</a>
     */
    public byte[] getDeviceMessage() {
        // Defensive copy so callers cannot mutate this value object.
        return deviceMessage == null ? null : deviceMessage.clone();
    }

    /**
     * Device message of beacon that was detected in lower-case, hex String
     * format. (Optional.)
     *
     * @see #getDeviceMessage()
     */
    public String getDeviceMessageAsHex() {
        return BeaconContentUtil.printHexBinary(deviceMessage);
    }

    /**
     * Jackson creator for {@link BeaconContent}; the {@code dm} string is
     * converted to bytes via {@code BeaconContentUtil.parseBytesOrNull}.
     */
    @JsonCreator
    public BeaconContent(
            @JsonProperty("hwid") final String hwid,
            @JsonProperty("type") final String type,
            @JsonProperty("dm") final String deviceMessage) {
        this.hwid = hwid;
        this.type = type;
        this.deviceMessage = BeaconContentUtil.parseBytesOrNull(deviceMessage);
    }

    // Hand-written instead of Lombok-generated so the byte[] renders as hex;
    // the produced string is identical to the original implementation's.
    @Override
    public String toString() {
        final StringBuilder text = new StringBuilder("BeaconContent");
        text.append("(hwid=").append(getHwid());
        text.append(", type=").append(getType());
        text.append(", deviceMessage=").append(getDeviceMessageAsHex());
        text.append(')');
        return text.toString();
    }
}
|
line/line-bot-sdk-java
|
line-bot-model/src/main/java/com/linecorp/bot/model/event/beacon/BeaconContent.java
|
Java
|
apache-2.0
| 3,087
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.om.helpers;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyInfo;
import org.apache.hadoop.util.Time;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
/**
* Args for key block. The block instance for the key requested in putKey.
* This is returned from OM to client, and client use class to talk to
* datanode. Also, this is the metadata written to om.db on server side.
*/
public final class OmKeyInfo {
  private final String volumeName;
  private final String bucketName;
  // name of key client specified
  private String keyName;
  private long dataSize;
  // Ordered old-to-new list of block-location version groups for this key.
  private List<OmKeyLocationInfoGroup> keyLocationVersions;
  private final long creationTime;
  private long modificationTime;
  private HddsProtos.ReplicationType type;
  private HddsProtos.ReplicationFactor factor;

  private OmKeyInfo(String volumeName, String bucketName, String keyName,
      List<OmKeyLocationInfoGroup> versions, long dataSize,
      long creationTime, long modificationTime, HddsProtos.ReplicationType type,
      HddsProtos.ReplicationFactor factor) {
    this.volumeName = volumeName;
    this.bucketName = bucketName;
    this.keyName = keyName;
    this.dataSize = dataSize;
    // it is important that the versions are ordered from old to new.
    // Do this sanity check when versions got loaded on creating OmKeyInfo.
    // TODO : this is not necessary, here only because versioning is still a
    // work in-progress, remove this following check when versioning is
    // complete and prove correctly functioning
    long currentVersion = -1;
    for (OmKeyLocationInfoGroup version : versions) {
      Preconditions.checkArgument(
            currentVersion + 1 == version.getVersion());
      currentVersion = version.getVersion();
    }
    this.keyLocationVersions = versions;
    this.creationTime = creationTime;
    this.modificationTime = modificationTime;
    this.factor = factor;
    this.type = type;
  }

  public String getVolumeName() {
    return volumeName;
  }

  public String getBucketName() {
    return bucketName;
  }

  public HddsProtos.ReplicationType getType() {
    return type;
  }

  public HddsProtos.ReplicationFactor getFactor() {
    return factor;
  }

  public String getKeyName() {
    return keyName;
  }

  public void setKeyName(String keyName) {
    this.keyName = keyName;
  }

  public long getDataSize() {
    return dataSize;
  }

  public void setDataSize(long size) {
    this.dataSize = size;
  }

  /**
   * Returns the newest version group, or {@code null} when no version
   * exists yet.
   */
  public synchronized OmKeyLocationInfoGroup getLatestVersionLocations() {
    // isEmpty() instead of size() == 0 for clarity.
    return keyLocationVersions.isEmpty() ? null :
        keyLocationVersions.get(keyLocationVersions.size() - 1);
  }

  public List<OmKeyLocationInfoGroup> getKeyLocationVersions() {
    return keyLocationVersions;
  }

  /**
   * Bumps the modification time.
   *
   * <p>NOTE(review): the misspelled name ("Modifcation") is kept for source
   * compatibility with existing callers. It uses {@link Time#monotonicNow()}
   * while the other mutators use {@link Time#now()} — TODO confirm whether
   * that difference is intentional.</p>
   */
  public void updateModifcationTime() {
    this.modificationTime = Time.monotonicNow();
  }

  /**
   * updates the length of the each block in the list given.
   * This will be called when the key is being committed to OzoneManager.
   *
   * @param locationInfoList list of locationInfo
   * @throws IOException
   */
  public void updateLocationInfoList(List<OmKeyLocationInfo> locationInfoList) {
    // Fetch the latest version group once (the original called
    // getLatestVersionLocations() twice for the same value).
    OmKeyLocationInfoGroup keyLocationInfoGroup = getLatestVersionLocations();
    long latestVersion = keyLocationInfoGroup.getVersion();
    List<OmKeyLocationInfo> currentList =
        keyLocationInfoGroup.getLocationList();
    List<OmKeyLocationInfo> latestVersionList =
        keyLocationInfoGroup.getBlocksLatestVersionOnly();
    // Updates the latest locationList in the latest version only with
    // given locationInfoList here.
    // TODO : The original allocated list and the updated list here may vary
    // as the containers on the Datanode on which the blocks were pre allocated
    // might get closed. The diff of blocks between these two lists here
    // need to be garbage collected in case the ozone client dies.
    currentList.removeAll(latestVersionList);
    // set each of the locationInfo object to the latest version
    // (Iterable.forEach instead of the redundant stream().forEach()).
    locationInfoList.forEach(omKeyLocationInfo -> omKeyLocationInfo
        .setCreateVersion(latestVersion));
    currentList.addAll(locationInfoList);
  }

  /**
   * Append a set of blocks to the latest version. Note that these blocks are
   * part of the latest version, not a new version.
   *
   * @param newLocationList the list of new blocks to be added.
   * @throws IOException if no version exists to append to.
   */
  public synchronized void appendNewBlocks(
      List<OmKeyLocationInfo> newLocationList) throws IOException {
    if (keyLocationVersions.isEmpty()) {
      throw new IOException("Appending new block, but no version exist");
    }
    OmKeyLocationInfoGroup currentLatestVersion =
        keyLocationVersions.get(keyLocationVersions.size() - 1);
    currentLatestVersion.appendNewBlocks(newLocationList);
    setModificationTime(Time.now());
  }

  /**
   * Add a new set of blocks. The new blocks will be added as appending a new
   * version to the all version list.
   *
   * @param newLocationList the list of new blocks to be added.
   * @return the version number of the newly added version.
   * @throws IOException
   */
  public synchronized long addNewVersion(
      List<OmKeyLocationInfo> newLocationList) throws IOException {
    long latestVersionNum;
    if (keyLocationVersions.isEmpty()) {
      // no version exist, these blocks are the very first version.
      keyLocationVersions.add(new OmKeyLocationInfoGroup(0, newLocationList));
      latestVersionNum = 0;
    } else {
      // it is important that the new version are always at the tail of the list
      OmKeyLocationInfoGroup currentLatestVersion =
          keyLocationVersions.get(keyLocationVersions.size() - 1);
      // the new version is created based on the current latest version
      OmKeyLocationInfoGroup newVersion =
          currentLatestVersion.generateNextVersion(newLocationList);
      keyLocationVersions.add(newVersion);
      latestVersionNum = newVersion.getVersion();
    }
    setModificationTime(Time.now());
    return latestVersionNum;
  }

  public long getCreationTime() {
    return creationTime;
  }

  public long getModificationTime() {
    return modificationTime;
  }

  public void setModificationTime(long modificationTime) {
    this.modificationTime = modificationTime;
  }

  /**
   * Builder of OmKeyInfo.
   */
  public static class Builder {
    private String volumeName;
    private String bucketName;
    private String keyName;
    private long dataSize;
    private List<OmKeyLocationInfoGroup> omKeyLocationInfoGroups;
    private long creationTime;
    private long modificationTime;
    private HddsProtos.ReplicationType type;
    private HddsProtos.ReplicationFactor factor;

    public Builder setVolumeName(String volume) {
      this.volumeName = volume;
      return this;
    }

    public Builder setBucketName(String bucket) {
      this.bucketName = bucket;
      return this;
    }

    public Builder setKeyName(String key) {
      this.keyName = key;
      return this;
    }

    public Builder setOmKeyLocationInfos(
        List<OmKeyLocationInfoGroup> omKeyLocationInfoList) {
      this.omKeyLocationInfoGroups = omKeyLocationInfoList;
      return this;
    }

    public Builder setDataSize(long size) {
      this.dataSize = size;
      return this;
    }

    public Builder setCreationTime(long crTime) {
      this.creationTime = crTime;
      return this;
    }

    public Builder setModificationTime(long mTime) {
      this.modificationTime = mTime;
      return this;
    }

    public Builder setReplicationFactor(HddsProtos.ReplicationFactor factor) {
      this.factor = factor;
      return this;
    }

    public Builder setReplicationType(HddsProtos.ReplicationType type) {
      this.type = type;
      return this;
    }

    public OmKeyInfo build() {
      return new OmKeyInfo(
          volumeName, bucketName, keyName, omKeyLocationInfoGroups,
          dataSize, creationTime, modificationTime, type, factor);
    }
  }

  /** Serializes this key info to its protobuf representation. */
  public KeyInfo getProtobuf() {
    long latestVersion = keyLocationVersions.isEmpty() ? -1 :
        keyLocationVersions.get(keyLocationVersions.size() - 1).getVersion();
    return KeyInfo.newBuilder()
        .setVolumeName(volumeName)
        .setBucketName(bucketName)
        .setKeyName(keyName)
        .setDataSize(dataSize)
        .setFactor(factor)
        .setType(type)
        .addAllKeyLocationList(keyLocationVersions.stream()
            .map(OmKeyLocationInfoGroup::getProtobuf)
            .collect(Collectors.toList()))
        .setLatestVersion(latestVersion)
        .setCreationTime(creationTime)
        .setModificationTime(modificationTime)
        .build();
  }

  /** Rebuilds an OmKeyInfo from its protobuf representation. */
  public static OmKeyInfo getFromProtobuf(KeyInfo keyInfo) {
    return new OmKeyInfo(
        keyInfo.getVolumeName(),
        keyInfo.getBucketName(),
        keyInfo.getKeyName(),
        keyInfo.getKeyLocationListList().stream()
            .map(OmKeyLocationInfoGroup::getFromProtobuf)
            .collect(Collectors.toList()),
        keyInfo.getDataSize(),
        keyInfo.getCreationTime(),
        keyInfo.getModificationTime(),
        keyInfo.getType(),
        keyInfo.getFactor());
  }
}
|
ucare-uchicago/hadoop
|
hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyInfo.java
|
Java
|
apache-2.0
| 10,201
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.server.client;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.NamespaceNotFoundException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.impl.Namespaces;
import org.apache.accumulo.core.client.impl.Tables;
import org.apache.accumulo.core.client.impl.thrift.ClientService;
import org.apache.accumulo.core.client.impl.thrift.ConfigurationType;
import org.apache.accumulo.core.client.impl.thrift.SecurityErrorCode;
import org.apache.accumulo.core.client.impl.thrift.TDiskUsage;
import org.apache.accumulo.core.client.impl.thrift.TableOperation;
import org.apache.accumulo.core.client.impl.thrift.TableOperationExceptionType;
import org.apache.accumulo.core.client.impl.thrift.ThriftSecurityException;
import org.apache.accumulo.core.client.impl.thrift.ThriftTableOperationException;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.Credentials;
import org.apache.accumulo.core.security.NamespacePermission;
import org.apache.accumulo.core.security.SystemPermission;
import org.apache.accumulo.core.security.TablePermission;
import org.apache.accumulo.core.security.thrift.TCredentials;
import org.apache.accumulo.server.conf.ServerConfiguration;
import org.apache.accumulo.server.fs.VolumeManager;
import org.apache.accumulo.server.security.AuditedSecurityOperation;
import org.apache.accumulo.server.security.SecurityOperation;
import org.apache.accumulo.server.util.TableDiskUsage;
import org.apache.accumulo.server.zookeeper.TransactionWatcher;
import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
import org.apache.accumulo.trace.thrift.TInfo;
import org.apache.log4j.Logger;
import org.apache.thrift.TException;
public class ClientServiceHandler implements ClientService.Iface {
  private static final Logger log = Logger.getLogger(ClientServiceHandler.class);
  // Shared, audited security facade used for permission/authentication checks.
  private static SecurityOperation security = AuditedSecurityOperation.getInstance();
  protected final TransactionWatcher transactionWatcher;
  private final Instance instance;
  private final VolumeManager fs;

  /**
   * Creates the thrift client-service handler.
   *
   * @param instance the Accumulo instance this handler serves
   * @param transactionWatcher watcher used to coordinate transactions
   * @param fs volume manager handle
   */
  public ClientServiceHandler(Instance instance, TransactionWatcher transactionWatcher, VolumeManager fs) {
    this.instance = instance;
    this.transactionWatcher = transactionWatcher;
    this.fs = fs;
  }
public static String checkTableId(Instance instance, String tableName, TableOperation operation) throws ThriftTableOperationException {
TableOperationExceptionType reason = null;
try {
return Tables._getTableId(instance, tableName);
} catch (NamespaceNotFoundException e) {
reason = TableOperationExceptionType.NAMESPACE_NOTFOUND;
} catch (TableNotFoundException e) {
reason = TableOperationExceptionType.NOTFOUND;
}
throw new ThriftTableOperationException(null, tableName, operation, reason, null);
}
public static String checkNamespaceId(Instance instance, String namespace, TableOperation operation) throws ThriftTableOperationException {
String namespaceId = Namespaces.getNameToIdMap(instance).get(namespace);
if (namespaceId == null) {
// maybe the namespace exists, but the cache was not updated yet... so try to clear the cache and check again
Tables.clearCache(instance);
namespaceId = Namespaces.getNameToIdMap(instance).get(namespace);
if (namespaceId == null)
throw new ThriftTableOperationException(null, namespace, operation, TableOperationExceptionType.NAMESPACE_NOTFOUND, null);
}
return namespaceId;
}
@Override
public String getInstanceId() {
return instance.getInstanceID();
}
@Override
public String getRootTabletLocation() {
return instance.getRootTabletLocation();
}
@Override
public String getZooKeepers() {
return instance.getZooKeepers();
}
@Override
public void ping(TCredentials credentials) {
// anybody can call this; no authentication check
log.info("Master reports: I just got pinged!");
}
@Override
public boolean authenticate(TInfo tinfo, TCredentials credentials) throws ThriftSecurityException {
try {
return security.authenticateUser(credentials, credentials);
} catch (ThriftSecurityException e) {
log.error(e);
throw e;
}
}
@Override
public boolean authenticateUser(TInfo tinfo, TCredentials credentials, TCredentials toAuth) throws ThriftSecurityException {
try {
return security.authenticateUser(credentials, toAuth);
} catch (ThriftSecurityException e) {
log.error(e);
throw e;
}
}
@Override
public void changeAuthorizations(TInfo tinfo, TCredentials credentials, String user, List<ByteBuffer> authorizations) throws ThriftSecurityException {
security.changeAuthorizations(credentials, user, new Authorizations(authorizations));
}
@Override
public void changeLocalUserPassword(TInfo tinfo, TCredentials credentials, String principal, ByteBuffer password) throws ThriftSecurityException {
PasswordToken token = new PasswordToken(password);
Credentials toChange = new Credentials(principal, token);
security.changePassword(credentials, toChange);
}
@Override
public void createLocalUser(TInfo tinfo, TCredentials credentials, String principal, ByteBuffer password) throws ThriftSecurityException {
PasswordToken token = new PasswordToken(password);
Credentials newUser = new Credentials(principal, token);
security.createUser(credentials, newUser, new Authorizations());
}
@Override
public void dropLocalUser(TInfo tinfo, TCredentials credentials, String user) throws ThriftSecurityException {
security.dropUser(credentials, user);
}
@Override
public List<ByteBuffer> getUserAuthorizations(TInfo tinfo, TCredentials credentials, String user) throws ThriftSecurityException {
return security.getUserAuthorizations(credentials, user).getAuthorizationsBB();
}
@Override
public void grantSystemPermission(TInfo tinfo, TCredentials credentials, String user, byte permission) throws ThriftSecurityException {
security.grantSystemPermission(credentials, user, SystemPermission.getPermissionById(permission));
}
@Override
public void grantTablePermission(TInfo tinfo, TCredentials credentials, String user, String tableName, byte permission) throws ThriftSecurityException,
ThriftTableOperationException {
String tableId = checkTableId(instance, tableName, TableOperation.PERMISSION);
String namespaceId = Tables.getNamespaceId(instance, tableId);
security.grantTablePermission(credentials, user, tableId, TablePermission.getPermissionById(permission), namespaceId);
}
@Override
public void grantNamespacePermission(TInfo tinfo, TCredentials credentials, String user, String ns, byte permission) throws ThriftSecurityException,
ThriftTableOperationException {
String namespaceId = checkNamespaceId(instance, ns, TableOperation.PERMISSION);
security.grantNamespacePermission(credentials, user, namespaceId, NamespacePermission.getPermissionById(permission));
}
@Override
public void revokeSystemPermission(TInfo tinfo, TCredentials credentials, String user, byte permission) throws ThriftSecurityException {
security.revokeSystemPermission(credentials, user, SystemPermission.getPermissionById(permission));
}
@Override
public void revokeTablePermission(TInfo tinfo, TCredentials credentials, String user, String tableName, byte permission) throws ThriftSecurityException,
ThriftTableOperationException {
String tableId = checkTableId(instance, tableName, TableOperation.PERMISSION);
String namespaceId = Tables.getNamespaceId(instance, tableId);
security.revokeTablePermission(credentials, user, tableId, TablePermission.getPermissionById(permission), namespaceId);
}
@Override
public boolean hasSystemPermission(TInfo tinfo, TCredentials credentials, String user, byte sysPerm) throws ThriftSecurityException {
return security.hasSystemPermission(credentials, user, SystemPermission.getPermissionById(sysPerm));
}
@Override
public boolean hasTablePermission(TInfo tinfo, TCredentials credentials, String user, String tableName, byte tblPerm) throws ThriftSecurityException,
ThriftTableOperationException {
String tableId = checkTableId(instance, tableName, TableOperation.PERMISSION);
return security.hasTablePermission(credentials, user, tableId, TablePermission.getPermissionById(tblPerm));
}
@Override
public boolean hasNamespacePermission(TInfo tinfo, TCredentials credentials, String user, String ns, byte perm) throws ThriftSecurityException,
ThriftTableOperationException {
String namespaceId = checkNamespaceId(instance, ns, TableOperation.PERMISSION);
return security.hasNamespacePermission(credentials, user, namespaceId, NamespacePermission.getPermissionById(perm));
}
@Override
public void revokeNamespacePermission(TInfo tinfo, TCredentials credentials, String user, String ns, byte permission) throws ThriftSecurityException,
ThriftTableOperationException {
String namespaceId = checkNamespaceId(instance, ns, TableOperation.PERMISSION);
security.revokeNamespacePermission(credentials, user, namespaceId, NamespacePermission.getPermissionById(permission));
}
@Override
public Set<String> listLocalUsers(TInfo tinfo, TCredentials credentials) throws ThriftSecurityException {
return security.listUsers(credentials);
}
private static Map<String,String> conf(TCredentials credentials, AccumuloConfiguration conf) throws TException {
security.authenticateUser(credentials, credentials);
conf.invalidateCache();
Map<String,String> result = new HashMap<String,String>();
for (Entry<String,String> entry : conf) {
String key = entry.getKey();
if (!Property.isSensitive(key))
result.put(key, entry.getValue());
}
return result;
}
@Override
public Map<String,String> getConfiguration(TInfo tinfo, TCredentials credentials, ConfigurationType type) throws TException {
switch (type) {
case CURRENT:
return conf(credentials, new ServerConfiguration(instance).getConfiguration());
case SITE:
return conf(credentials, ServerConfiguration.getSiteConfiguration());
case DEFAULT:
return conf(credentials, AccumuloConfiguration.getDefaultConfiguration());
}
throw new RuntimeException("Unexpected configuration type " + type);
}
@Override
public Map<String,String> getTableConfiguration(TInfo tinfo, TCredentials credentials, String tableName) throws TException, ThriftTableOperationException {
String tableId = checkTableId(instance, tableName, null);
AccumuloConfiguration config = ServerConfiguration.getTableConfiguration(instance, tableId);
return conf(credentials, config);
}
@Override
public List<String> bulkImportFiles(TInfo tinfo, final TCredentials credentials, final long tid, final String tableId, final List<String> files,
final String errorDir, final boolean setTime) throws ThriftSecurityException, ThriftTableOperationException, TException {
try {
if (!security.canPerformSystemActions(credentials))
throw new AccumuloSecurityException(credentials.getPrincipal(), SecurityErrorCode.PERMISSION_DENIED);
return transactionWatcher.run(Constants.BULK_ARBITRATOR_TYPE, tid, new Callable<List<String>>() {
@Override
public List<String> call() throws Exception {
return BulkImporter.bulkLoad(new ServerConfiguration(instance).getConfiguration(), instance, Credentials.fromThrift(credentials), tid, tableId, files, errorDir, setTime);
}
});
} catch (AccumuloSecurityException e) {
throw e.asThriftException();
} catch (Exception ex) {
throw new TException(ex);
}
}
@Override
public boolean isActive(TInfo tinfo, long tid) throws TException {
return transactionWatcher.isActive(tid);
}
@SuppressWarnings({"rawtypes", "unchecked"})
@Override
public boolean checkClass(TInfo tinfo, TCredentials credentials, String className, String interfaceMatch) throws TException {
security.authenticateUser(credentials, credentials);
ClassLoader loader = getClass().getClassLoader();
Class shouldMatch;
try {
shouldMatch = loader.loadClass(interfaceMatch);
Class test = AccumuloVFSClassLoader.loadClass(className, shouldMatch);
test.newInstance();
return true;
} catch (ClassCastException e) {
log.warn("Error checking object types", e);
return false;
} catch (ClassNotFoundException e) {
log.warn("Error checking object types", e);
return false;
} catch (InstantiationException e) {
log.warn("Error checking object types", e);
return false;
} catch (IllegalAccessException e) {
log.warn("Error checking object types", e);
return false;
}
}
@Override
public boolean checkTableClass(TInfo tinfo, TCredentials credentials, String tableName, String className, String interfaceMatch) throws TException,
ThriftTableOperationException, ThriftSecurityException {
security.authenticateUser(credentials, credentials);
String tableId = checkTableId(instance, tableName, null);
ClassLoader loader = getClass().getClassLoader();
Class<?> shouldMatch;
try {
shouldMatch = loader.loadClass(interfaceMatch);
new ServerConfiguration(instance).getTableConfiguration(tableId);
String context = new ServerConfiguration(instance).getTableConfiguration(tableId).get(Property.TABLE_CLASSPATH);
ClassLoader currentLoader;
if (context != null && !context.equals("")) {
currentLoader = AccumuloVFSClassLoader.getContextManager().getClassLoader(context);
} else {
currentLoader = AccumuloVFSClassLoader.getClassLoader();
}
Class<?> test = currentLoader.loadClass(className).asSubclass(shouldMatch);
test.newInstance();
return true;
} catch (Exception e) {
log.warn("Error checking object types", e);
return false;
}
}
@Override
public boolean checkNamespaceClass(TInfo tinfo, TCredentials credentials, String ns, String className, String interfaceMatch) throws TException,
ThriftTableOperationException, ThriftSecurityException {
security.authenticateUser(credentials, credentials);
String namespaceId = checkNamespaceId(instance, ns, null);
ClassLoader loader = getClass().getClassLoader();
Class<?> shouldMatch;
try {
shouldMatch = loader.loadClass(interfaceMatch);
new ServerConfiguration(instance).getNamespaceConfiguration(namespaceId);
String context = new ServerConfiguration(instance).getNamespaceConfiguration(namespaceId).get(Property.TABLE_CLASSPATH);
ClassLoader currentLoader;
if (context != null && !context.equals("")) {
currentLoader = AccumuloVFSClassLoader.getContextManager().getClassLoader(context);
} else {
currentLoader = AccumuloVFSClassLoader.getClassLoader();
}
Class<?> test = currentLoader.loadClass(className).asSubclass(shouldMatch);
test.newInstance();
return true;
} catch (Exception e) {
log.warn("Error checking object types", e);
return false;
}
}
@Override
public List<TDiskUsage> getDiskUsage(Set<String> tables, TCredentials credentials) throws ThriftTableOperationException, ThriftSecurityException, TException {
try {
final Credentials creds = Credentials.fromThrift(credentials);
Connector conn = instance.getConnector(creds.getPrincipal(), creds.getToken());
HashSet<String> tableIds = new HashSet<String>();
for (String table : tables) {
// ensure that table table exists
String tableId = checkTableId(instance, table, null);
tableIds.add(tableId);
String namespaceId = Tables.getNamespaceId(instance, tableId);
if (!security.canScan(credentials, tableId, namespaceId))
throw new ThriftSecurityException(credentials.getPrincipal(), SecurityErrorCode.PERMISSION_DENIED);
}
// use the same set of tableIds that were validated above to avoid race conditions
Map<TreeSet<String>,Long> diskUsage = TableDiskUsage.getDiskUsage(new ServerConfiguration(instance).getConfiguration(), tableIds, fs, conn);
List<TDiskUsage> retUsages = new ArrayList<TDiskUsage>();
for (Map.Entry<TreeSet<String>,Long> usageItem : diskUsage.entrySet()) {
retUsages.add(new TDiskUsage(new ArrayList<String>(usageItem.getKey()), usageItem.getValue()));
}
return retUsages;
} catch (AccumuloSecurityException e) {
throw e.asThriftException();
} catch (AccumuloException e) {
throw new TException(e);
} catch (IOException e) {
throw new TException(e);
}
}
@Override
public Map<String,String> getNamespaceConfiguration(TInfo tinfo, TCredentials credentials, String ns) throws ThriftTableOperationException, TException {
String namespaceId;
try {
namespaceId = Namespaces.getNamespaceId(instance, ns);
} catch (NamespaceNotFoundException e) {
String why = "Could not find namespace while getting configuration.";
throw new ThriftTableOperationException(null, ns, null, TableOperationExceptionType.NAMESPACE_NOTFOUND, why);
}
AccumuloConfiguration config = ServerConfiguration.getNamespaceConfiguration(instance, namespaceId);
return conf(credentials, config);
}
}
|
joshelser/accumulo
|
server/base/src/main/java/org/apache/accumulo/server/client/ClientServiceHandler.java
|
Java
|
apache-2.0
| 19,073
|
/**
* @license
* Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '@tensorflow/tfjs-core';
import * as tf_tsne from '../../src/index';
import * as gl_util from "../../src/gl_util";
const pngReader = require('pngjs').PNG;
/**
 * Resolve after roughly `time` milliseconds (setTimeout-based delay).
 * @param time - delay in milliseconds
 * @returns {Promise} resolves with no value once the timeout fires
 */
function sleep(time) {
  return new Promise((resolve) => {
    setTimeout(resolve, time);
  });
}
// Hosted MNIST assets: a sprite PNG holding all 65000 digit images (one row of
// pixels per image) and a binary blob of one-hot encoded labels.
const MNIST_IMAGES_PATH = 'https://storage.googleapis.com/learnjs-data/model-builder/mnist_images.png';
const MNIST_LABELS_PATH = 'https://storage.googleapis.com/learnjs-data/model-builder/mnist_labels_uint8';
/**
 * Resolve a set of uniform locations for a program once, up front.
 * @param gl - WebGL context
 * @param program - linked WebGLProgram to query
 * @param locationArray - uniform names to look up
 * @returns object mapping each name to its WebGLUniformLocation
 *          (throws, via the tf helper, if a name is not found)
 */
function getUniformLocations(gl, program, locationArray) {
  const locationTable = {};
  locationArray.forEach((locName) => {
    locationTable[locName] =
        tf.webgl.webgl_util.getProgramUniformLocationOrThrow(gl, program, locName);
  });
  return locationTable;
}
/**
 * Read back the point id rendered at canvas pixel (x, y) from the offscreen
 * hit-test attachment (color attachment 1, an R32UI texture written by the
 * points-to-textures program).
 * Returns -1 if the offscreen framebuffer does not exist yet, 0 if the
 * readPixels failed, otherwise the stored id (vertex_id + 1; 0 means no point).
 */
function executeHitSampleProgram(x, y) {
if (!offscreen_fbo) {
return -1;
}
const gl = backend.getGPGPUContext().gl;
gl.bindFramebuffer(gl.FRAMEBUFFER, offscreen_fbo);
// retrieve the id data for hit test.
const hit_array = new Uint32Array(1);
// WebGL2 gl.READ_FRAMEBUFFER enum, inlined as a literal -- presumably to avoid
// relying on the property being exposed on this context; TODO confirm
const READ_FRAMEBUFFER = 0x8CA8;
gl.bindFramebuffer(READ_FRAMEBUFFER, offscreen_fbo);
gl.readBuffer(gl.COLOR_ATTACHMENT0 + 1); // which buffer to read
gl.readPixels(x, y, 1, 1, gl.RED_INTEGER, gl.UNSIGNED_INT, hit_array); // read it
if (gl.getError() !== gl.NO_ERROR) {
console.log('Failed to retrieve hit value');
return 0;
}
// restore the default framebuffer before returning
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
return hit_array[0]
}
/**
 * Build a minimal pass-through program that samples a texture onto a quad:
 * the vertex shader forwards positions/texcoords, the fragment shader does a
 * plain texture lookup. Used to blit a texture onto a screen rectangle.
 * @param gl - WebGL2 context
 * @returns {WebGLProgram}
 */
function createTextureToQuadProgram(gl) {
const vertexShaderSource = `#version 300 es
in vec4 a_Position;
in vec2 a_TexCoord;
out vec2 v_TexCoord;
void main() {
gl_Position = a_Position;
v_TexCoord = a_TexCoord;
}`;
const fragmentShaderSource = `#version 300 es
precision highp float;
uniform sampler2D u_Sampler;
in vec2 v_TexCoord;
out vec4 fragColor;
void main() {
fragColor = texture(u_Sampler, v_TexCoord);
}`;
return gl_util.createVertexProgram(
gl, vertexShaderSource, fragmentShaderSource);
}
/**
 * Render an existing texture to a rectangle on the canvas.
 * Any information at that position is overwritten.
 * @param gpgpu - the backend gpgpu context
 * @param program - the screen quad drawing program (createTextureToQuadProgram)
 * @param uniforms - an object containing the uniform locations for the program
 * @param texture - the texture to be rendered
 * @param width - the width of canvas rectangle
 * @param height - the height of the canvas rectangle
 * @param left - the left edge of the canvas rectangle
 * @param bottom - the bottom edge of the canvas rectangle
 */
function executeRenderTextureToScreenQuad(
gpgpu, program, uniforms, texture,
width, height, left, bottom) {
const gl = gpgpu.gl;
//const oldProgram = gpgpu.program;
gpgpu.setProgram(program);
// target the default framebuffer (the visible canvas)
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));
// restrict drawing to the rectangle, clear it to white, then alpha-blend the texture
gl.enable(gl.SCISSOR_TEST);
gl.enable(gl.DEPTH_TEST);
gl.scissor(left, bottom, width, height);
gl.viewport(left, bottom, width, height);
gl.clearColor(1.0,1.0,1.0,1.);
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
// vertex and texture coords interleaved in one buffer (x, y, u, v per vertex)
const vertexCoords = new Float32Array([
-1.0, -1.0, 0.0, 0.0,
1.0, -1.0, 1.0, 0.0,
1.0, 1.0, 1.0, 1.0,
-1.0, -1.0, 0.0, 0.0,
1.0, 1.0, 1.0, 1.0,
-1.0, 1.0, 0.0, 1.0
]);
const vertexCoordsBuffer = tf.webgl.webgl_util.createStaticVertexBuffer(
gl, vertexCoords);
const FSIZE = vertexCoords.BYTES_PER_ELEMENT;
// position offset = 0, stride = 4 floats per vertex
tf.webgl.webgl_util.bindVertexBufferToProgramAttribute(
gl, program, 'a_Position', vertexCoordsBuffer, 2, FSIZE * 4, 0);
// tex coord offset = FSIZE * 2
tf.webgl.webgl_util.bindVertexBufferToProgramAttribute(
gl, program, 'a_TexCoord', vertexCoordsBuffer, 2, FSIZE * 4, FSIZE * 2);
gpgpu.setInputMatrixTexture(texture, uniforms.u_Sampler, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6); // Draw the rectangle
// release the transient buffer and restore GL state
gl.deleteBuffer(vertexCoordsBuffer);
gl.disable(gl.BLEND);
gl.disable(gl.SCISSOR_TEST);
gl.disable(gl.DEPTH_TEST);
}
/**
 * Build a program that visualizes one channel of a field texture, selected by
 * the comp_idx uniform: channel 0 uses a white-to-red scale, channels 1 and 2
 * a blue-white-red diverging scale; anything else renders black. tex_norm and
 * scale_s_field normalize the raw field values into displayable range.
 * @param gl - WebGL2 context
 * @returns {WebGLProgram}
 */
function createTextureDisplayProgram(gl) {
var vertexShaderSource = `#version 300 es
precision highp float;
in vec4 a_position;
in vec2 a_texcoord;
out vec2 v_texCoord;
void main() {
// Vertex shader output
gl_Position = a_position;
v_texCoord = a_texcoord;
}`;
const colorTextureShader = `#version 300 es
precision highp float;
// Interpolated 0-1 fragment coords from vertex shader
in vec2 v_texCoord;
uniform sampler2D u_image;
uniform int comp_idx;
uniform float tex_norm;
uniform float scale_s_field;
out vec4 fragmentColor;
vec4 scaleRedWhiteBlue(float val) {
float red = step(-0.05, val); //slight bias in the values
float blue = 1. - red;
return mix(
vec4(1.,1.,1.,1.),
vec4(1.,0.,0.,1.) * red + vec4(0.,0.,1.,1.) * blue,
abs(val)
);
}
vec4 scaleRedWhite(float val) {
return mix(
vec4(1.,1.,1.,1.),
vec4(1.,0.,0.,1.),
val
);
}
void main() {
float fieldVal;
// Look up a color from the texture.
switch (comp_idx) {
case 0:
fieldVal = clamp(scale_s_field * texture(u_image, v_texCoord).r/(tex_norm/3.), 0., 1.);
fragmentColor = scaleRedWhite(fieldVal);
break;
case 1:
fieldVal = clamp(texture(u_image, v_texCoord).g/(tex_norm/8.), -1., 1.);
fragmentColor = scaleRedWhiteBlue(fieldVal);
break;
case 2:
fieldVal = clamp(texture(u_image, v_texCoord).b/(tex_norm/8.), -1., 1.);
fragmentColor = scaleRedWhiteBlue(fieldVal);
break;
default:
fragmentColor = vec4(0., 0., 0., 1.);
}
}`;
return gl_util.createVertexProgram(
gl, vertexShaderSource, colorTextureShader);
}
/**
 * Draw a field texture into a rectangle of the backend canvas using the
 * texture-display program (one channel mapped to a color scale).
 * @param gpgpu - the backend gpgpu context
 * @param program - program from createTextureDisplayProgram
 * @param uniforms - uniform locations (u_image, comp_idx, tex_norm, scale_s_field)
 * @param texture - the field texture to visualize
 * @param textureNorm - normalization factor for the field values
 * @param numPoints - number of embedding points (used to scale channel 0)
 * @param index - which channel to display (0, 1 or 2)
 * @param width - width of the canvas rectangle
 * @param height - height of the canvas rectangle
 * @param left - left edge of the canvas rectangle
 * @param bottom - bottom edge of the canvas rectangle
 */
function executeTextureDisplayProgram(
gpgpu, program, uniforms, texture, textureNorm, numPoints, index,
width, height, left, bottom) {
const gl = gpgpu.gl;
//const oldProgram = gpgpu.program;
gpgpu.setProgram(program);
// this is the backend canvas - clear the display window
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.bindFramebuffer(gl.FRAMEBUFFER, null));
gl.enable(gl.SCISSOR_TEST);
gl.enable(gl.DEPTH_TEST);
gl.scissor(left, bottom, width, height);
gl.viewport(left, bottom, width, height);
gl.clearColor(1., 1., 1., 1.);
tf.webgl.webgl_util.callAndCheck(
gl, () =>gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT));
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
// full-viewport quad in clip space
const quadVertices = new Float32Array([
-1,-1,
1,-1,
1, 1,
-1,-1,
1, 1,
-1, 1]);
// create and load buffers for the geometry vertices and indices
const quad_buffer = tf.webgl.webgl_util.createStaticVertexBuffer(gl, quadVertices);
tf.webgl.webgl_util.bindVertexBufferToProgramAttribute(
gl, program, 'a_position', quad_buffer, 2, 0, 0);
const texCoord = new Float32Array([
0, 0,
1, 0,
1, 1,
0, 0,
1, 1,
0, 1]);
const texc_buff = tf.webgl.webgl_util.createStaticVertexBuffer(gl, texCoord);
tf.webgl.webgl_util.bindVertexBufferToProgramAttribute(
gl, program, 'a_texcoord', texc_buff, 2, 0, 0);
gpgpu.setInputMatrixTexture(texture, uniforms.u_image, 0);
gl.uniform1i(uniforms.comp_idx, index);
gl.uniform1f(uniforms.tex_norm, textureNorm);
// boost the scalar-field display for small point counts so it stays visible
let scale_s_field = 1;
if (numPoints < 2000) {
scale_s_field -= 0.9 * (2000 - numPoints)/2000;
}
gl.uniform1f(uniforms.scale_s_field, scale_s_field);
// Create the buffer object
// Draw the quad
gl.drawArrays(gl.TRIANGLES, 0, 6);
gl.disable(gl.BLEND);
gl.disable(gl.SCISSOR_TEST);
}
/**
 * Build the program that renders the embedding points and their ids into two
 * render targets at once: attachment 0 gets an anti-aliased colored dot per
 * point (plot_tex), attachment 1 gets the point id as an unsigned int
 * (id_tex, vertex_id + 1 so 0 means "no point") for hit testing.
 * @param gl - WebGL2 context
 * @returns {WebGLProgram}
 */
function createPointsToTexturesProgram(gl) {
const vertexShaderSource = `#version 300 es
precision highp float;
precision highp int;
in float vertex_id;
in vec3 label_color;
uniform sampler2D point_tex;
uniform float points_per_row;
uniform vec2 minV;
uniform vec2 maxV;
out float p_id;
out vec4 color;
void main() {
int pointNum = int(vertex_id);
int row = int(floor(vertex_id/points_per_row));
int col = int(mod(vertex_id, points_per_row));
float x_pnt = texelFetch(point_tex, ivec2(2 * col + 0, row), 0).r;
float y_pnt = texelFetch(point_tex, ivec2(2 * col + 1, row), 0).r;
// point coord from embedding to -1,-1 space
vec2 point_coords = (vec2(x_pnt, y_pnt) - minV)/(maxV - minV); // 0, 1 space
point_coords = (point_coords * 2.0) - 1.0; // -1, -1 space
// color lookup based on point label
color = vec4(label_color, 1.0);
gl_Position = vec4(point_coords, 0, 1);
gl_PointSize = 4.;
p_id = vertex_id + 1.;
}
`;
const fragmentShaderSource = `#version 300 es
precision highp float;
precision highp int;
layout(location = 0) out vec4 plot_tex;
layout(location = 1) out uint id_tex;
in vec4 color;
in float p_id;
void main() {
float r = 0.0, delta = 0.0, alpha = 1.0;
vec2 cxy = 2.0 * gl_PointCoord - 1.0;
r = dot(cxy, cxy);
delta = fwidth(r);
alpha = 1.0 - smoothstep(1.0 - delta, 1.0 + delta, r);
plot_tex = vec4(color.rgb, alpha);
id_tex = uint(p_id);
}
`;
return gl_util.createVertexProgram(
gl, vertexShaderSource, fragmentShaderSource);
}
/**
 * Create an R32UI (single-channel unsigned int) texture with nearest
 * filtering and clamp-to-edge wrapping, suitable for integer id render
 * targets.
 * @param gl - WebGL2 context
 * @param width - texture width in texels
 * @param height - texture height in texels
 * @param pixels - initial Uint32 data; callers may omit it (texImage2D then
 *                 receives undefined -- allocation only, contents undefined;
 *                 TODO confirm this is intentional in initOffscreenState)
 * @returns {WebGLTexture}
 */
function createAndConfigureUint32Texture(gl, width, height, pixels) {
const texture = tf.webgl.webgl_util.createTexture(gl);
// begin texture ops
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.bindTexture(gl.TEXTURE_2D, texture));
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE));
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE));
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST));
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST));
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.texImage2D(gl.TEXTURE_2D, 0, gl.R32UI, width, height,
0, gl.RED_INTEGER, gl.UNSIGNED_INT, pixels));
// end texture ops
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.bindTexture(gl.TEXTURE_2D, null));
return texture;
}
// Offscreen render state, created lazily by initOffscreenState:
// hit_texture - R32UI id texture (color attachment 1) for hit testing
// plot_tex - RGBA texture (color attachment 0) with the rendered points
// offscreen_fbo - framebuffer both textures are attached to
let hit_texture = null;
let plot_tex = null;
let offscreen_fbo = null;
/**
 * Lazily create the offscreen framebuffer and its two color attachments,
 * and leave the framebuffer bound. Safe to call every frame; existing
 * objects are reused.
 */
function initOffscreenState(gl, width, height) {
if (offscreen_fbo === null) {
offscreen_fbo = gl.createFramebuffer();
}
gl.bindFramebuffer(gl.FRAMEBUFFER, offscreen_fbo);
if (plot_tex === null) {
plot_tex = gl_util.createAndConfigureTexture(gl, width, height, 4);
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, plot_tex, 0));
}
if (hit_texture === null) {
// no pixel data passed -- texture storage is allocated but uninitialized
hit_texture = createAndConfigureUint32Texture(gl, width, height);
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + 1, gl.TEXTURE_2D, hit_texture, 0));
}
}
/**
 * Drop the references so initOffscreenState recreates everything next call.
 * NOTE(review): this does not delete the GL objects -- clearWebglData does;
 * confirm callers pair the two.
 */
function clearOffscreenState() {
hit_texture = null;
plot_tex = null;
offscreen_fbo = null;
}
/**
 * Render the embedding to the two offscreen targets: an RGBA point plot
 * (attachment 0) and an unsigned-int id texture for hit testing
 * (attachment 1). The embedding bounds are expanded to preserve aspect ratio.
 * @param gpgpu - the backend gpgpu context
 * @param program - program from createPointsToTexturesProgram
 * @param uniforms - uniform locations (point_tex, points_per_row, minV, maxV)
 * @param pointTex - texture holding the embedding coordinates
 * @param width - render target width
 * @param height - render target height
 * @param numPoints - number of points to draw
 * @param pointsPerRow - points per row in pointTex
 * @param minX - minimum x of the embedding
 * @param minY - minimum y of the embedding
 * @param maxX - maximum x of the embedding
 * @param maxY - maximum y of the embedding
 */
function executeOffscreenPointRender(
gpgpu, program, uniforms, pointTex,
width, height, numPoints, pointsPerRow,
minX, minY, maxX, maxY) {
const gl = gpgpu.gl;
// aspect-ratio preserving scaling
if (maxX - minX > maxY - minY) {
maxY = (maxY + minY) / 2 + (maxX - minX) / 2;
minY = (maxY + minY) / 2 - (maxX - minX) / 2;
}
else {
maxX = (maxX + minX) / 2 + (maxY - minY) / 2;
minX = (maxX + minX) / 2 - (maxY - minY) / 2;
}
// set up attributes and uniforms and render
//const oldProgram = gpgpu.program;
gpgpu.setProgram(program);
// create two draw buffer textures
initOffscreenState(gl, width, height);
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
// clear both textures
let attachBufs = [gl.COLOR_ATTACHMENT0, gl.COLOR_ATTACHMENT0 + 1];
tf.webgl.webgl_util.callAndCheck(
gl, ()=> gl.drawBuffers(attachBufs ));
tf.webgl.webgl_util.callAndCheck(gl, () =>gl.clearBufferfv(gl.COLOR, 0, [0.0,0.0,0.0,0.0]));
tf.webgl.webgl_util.callAndCheck(gl, () =>gl.clearBufferuiv(gl.COLOR, 1, [0,0,0,0]));
gl.viewport(0, 0, width, height);
// per-vertex inputs: point index and its label color (set up by initBuffers)
tf.webgl.webgl_util.bindVertexBufferToProgramAttribute(
gl, program, 'vertex_id', vertexIdBuffer, 1, 0, 0);
tf.webgl.webgl_util.bindVertexBufferToProgramAttribute(
gl, program, 'label_color', labelColorBuffer, 3, 0, 0);
gpgpu.setInputMatrixTexture(pointTex, uniforms.point_tex, 0);
gl.uniform1f(uniforms.points_per_row, pointsPerRow);
gl.uniform2f(uniforms.minV, minX, minY);
gl.uniform2f(uniforms.maxV, maxX, maxY);
tf.webgl.webgl_util.callAndCheck(
gl, () => gl.drawArrays(gl.POINTS, 0, numPoints));
gl.disable(gl.BLEND);
}
/**
 * Clear the whole visible canvas (default framebuffer) to opaque white.
 */
function clearBackground(gl) {
tf.webgl.webgl_util.bindCanvasToFramebuffer(gl);
gl.enable(gl.DEPTH_TEST);
gl.disable(gl.SCISSOR_TEST);
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
}
// Per-session vertex data shared with executeOffscreenPointRender,
// populated by initBuffers:
// labelColor/labelColorBuffer - RGB color per point
// vertexId/vertexIdBuffer - sequential point index per vertex
let labelColor;
let labelColorBuffer;
let vertexId;
let vertexIdBuffer;
/**
 * Set the fixed vertex buffers for this number of points and label colors.
 * @param numPoints - number of embedding points; vertex ids 0..numPoints-1
 * @param colors - flat RGB color array, one triple per point
 */
function initBuffers(numPoints, colors) {
let gl = backend.getGPGPUContext().gl;
// vertex id buffer: [0, 1, 2, ..., numPoints-1] as floats
vertexId = new Float32Array([...Array(numPoints).keys()]);
vertexIdBuffer =
tf.webgl.webgl_util.createStaticVertexBuffer(gl, vertexId);
labelColor = colors;
labelColorBuffer =
tf.webgl.webgl_util.createStaticVertexBuffer(gl, labelColor);
}
// Compiled plotting programs and their resolved uniform locations,
// created once by initPlotPrograms.
let pointToTexProgram;
let pointToTexUniforms;
let textureDisplayProgram;
let textureDisplayUniforms;
let textureToQuadProgram;
let textureToQuadUniforms;
/**
 * Compile the three WebGL programs used for plotting (point rendering,
 * field-texture display, texture blit) and cache their uniform locations
 * in the module-level globals above. Call once after the backend is set up.
 */
function initPlotPrograms() {
let gpgpu = backend.getGPGPUContext();
let gl = gpgpu.gl;
pointToTexProgram = createPointsToTexturesProgram(gl);
const pointToTexUniformList = ['point_tex', 'points_per_row', 'minV', 'maxV'];
pointToTexUniforms = getUniformLocations(gl, pointToTexProgram, pointToTexUniformList);
textureDisplayProgram = createTextureDisplayProgram(gl);
const textureUniformsList = ['u_image', 'comp_idx', 'tex_norm', 'scale_s_field'];
textureDisplayUniforms = getUniformLocations(gl, textureDisplayProgram, textureUniformsList);
textureToQuadProgram = createTextureToQuadProgram(gl);
const genericUniformsList = ['u_Sampler'];
textureToQuadUniforms = getUniformLocations(gl, textureToQuadProgram, genericUniformsList);
}
/**
 * Reduce the image tensor to newWidth x newHeight pixels and take the first
 * numImages rows.
 * @param tensor - 2D tensor, one flattened oldWidth*oldHeight image per row
 * @param oldWidth - source image width
 * @param oldHeight - source image height
 * @param newWidth - target image width
 * @param newHeight - target image height
 * @param numImages - number of leading rows to keep
 * @returns 2D tensor [numImages, newWidth*newHeight]
 */
function subsampleTensorImages(tensor,
                               oldWidth,
                               oldHeight,
                               newWidth,
                               newHeight,
                               numImages) {
  // tf.tidy disposes the intermediate tensors (slice / as4D / resizeBilinear
  // results), which otherwise leak GPU memory; only the returned tensor is kept.
  return tf.tidy(() => {
    const subSet = tensor.slice([0, 0], [numImages])
        .as4D(numImages, oldHeight, oldWidth, 1);
    return subSet.resizeBilinear([newHeight, newWidth])
        .reshape([numImages, newWidth * newHeight]);
  });
}
// Lookup table of page elements that receive plots and status text.
let displayObjects = {};
/**
 * Collect the DOM elements where the WebGL plots and other data will be
 * placed, and cache them in the module-level displayObjects table.
 * Texture-plot divs (class "texturePlot") are keyed by their element id;
 * fixed entries are added for the scatter plot, the knn iteration counter
 * and the status line.
 * @returns the populated displayObjects table
 */
function initDisplayObjects() {
  const objects = {};
  // one entry per texture-plot div, keyed by its id
  const textPlots = document.getElementsByClassName('texturePlot');
  Array.from(textPlots).forEach((element) => {
    objects[element.id] = {
      element: element,
      uniforms: {},
      data: null
    };
  });
  objects['scatterPlot'] = {
    element: document.getElementById('scatterPlot'),
    uniforms: {},
    data: null
  };
  objects['knnIter'] = {
    element: document.getElementById('knnIterCount'),
    data: null
  };
  objects['status'] = {
    element: document.getElementById('displayStatus'),
    data: null
  };
  displayObjects = objects;
  return displayObjects;
}
/**
 * Delete the offscreen GL objects (hit texture, plot texture, framebuffer).
 * No-op when the GL context is unavailable.
 * NOTE(review): this frees the GL resources but leaves the module-level
 * references set; clearOffscreenState nulls them -- confirm the two are
 * used together.
 */
function clearWebglData() {
const gl = backend.getGPGPUContext().gl;
if (!gl) {
return;
}
gl.deleteTexture(hit_texture);
gl.deleteTexture(plot_tex);
gl.deleteFramebuffer(offscreen_fbo);
}
/**
 * Reset the page for a fresh run: rebuild the display-object table, free the
 * offscreen GL resources, blank the digit preview canvas and clear the main
 * WebGL canvas to white.
 * @returns {{digitCanvCtx: CanvasRenderingContext2D}} 2d context of the digit canvas
 */
function initCanvas() {
initDisplayObjects();
clearWebglData();
const digitCanv = document.getElementById('digitCanv');
const digitCanvCtx = digitCanv.getContext('2d');
blankCanvas(digitCanvCtx);
clearBackground(backend.getGPGPUContext().gl);
return {digitCanvCtx: digitCanvCtx};
}
/**
 * Set a canvas context to white and return the associated
 * imageData and underlying data buffer for further manipulation.
 * @param ctx
 * @returns {{imgData: ImageData, pixArray: Uint8ClampedArray}}
 */
function blankCanvas(ctx) {
const imgData = ctx.getImageData(0,0,ctx.canvas.width, ctx.canvas.height);
const pixArray = new Uint8ClampedArray(imgData.data.buffer);
// fill every pixel with opaque white: 0xFFFFFFFF little-endian = RGBA(255,255,255,255)
const fillArray = new Uint32Array(imgData.data.buffer);
fillArray.fill(0xFFFFFFFF); //little endian
ctx.putImageData(imgData, 0, 0);
return {imgData: imgData, pixArray: pixArray};
}
/**
 * Convert one-hot encoded labels to label indices.
 * Generalized from the original hard-coded 65000 MNIST rows: the row count is
 * now derived from the input length, so any flat (numLabels x 10) one-hot
 * array works; behavior is unchanged for the MNIST label set.
 * @param labels - flat array of numLabels*10 one-hot values
 * @returns {Int32Array} one label index per row
 */
function oneHotToIndex(labels) {
  return tf.tidy(() => {
    // each row holds exactly 10 one-hot slots (digits 0-9)
    const numLabels = labels.length / 10;
    const oneHotTensor = tf.tensor2d(labels, [numLabels, 10], 'int32');
    const labelPosTensor = tf.tensor1d([0,1,2,3,4,5,6,7,8,9], 'int32');
    // multiply each one-hot row by its column index and sum -> the set index
    const labelsTensor = oneHotTensor.mul(labelPosTensor).sum(1);
    return labelsTensor.dataSync();
  });
}
/**
 * Fetch the MNIST sprite PNG and decode it into a Float32Array of
 * grayscale pixel values in [0, 1] (red channel only).
 * @returns {Promise<Float32Array>} one float per pixel
 */
async function loadMnist() {
  const response = await fetch(MNIST_IMAGES_PATH);
  const buffer = await response.arrayBuffer();
  const reader = new pngReader();
  return new Promise((resolve) => {
    reader.parse(buffer, (err, png) => {
      // The decoded PNG is RGBA Uint8 (0-255); the image is grayscale,
      // so keep one channel per pixel, scaled to [0, 1].
      const numPixels = png.data.length / 4;
      const pixels = new Float32Array(numPixels);
      for (let i = 0; i < numPixels; i++) {
        pixels[i] = png.data[i * 4] / 255.0;
      }
      resolve(pixels);
    });
  });
}
/**
 * Fetch the MNIST one-hot label data as a raw buffer.
 * @returns {Promise<ArrayBuffer>} label bytes
 */
async function loadMnistLabels() {
  const response = await fetch(MNIST_LABELS_PATH);
  return response.arrayBuffer();
}
/**
 * A global to hold the MNIST image data (Float32Array of pixels).
 */
let dataSet;
/**
 * A global to hold the MNIST label data (label index per example).
 */
let labelSet;
// Cooperative cancellation flag checked by the kNN/tsne loops.
let cancel = false;
// Gates rendering until the first kNN iteration has produced data.
let enableViz = false;
// The tfjs 'webgl' backend instance; set in initBackend().
let backend;
// MAX_TEXTURE_SIZE of the GL context; set in initBackend().
let maxSize = 0;
/**
 * Clear the backend's canvas framebuffer to opaque white.
 */
function clearBackendCanvas() {
  const context = backend.getGPGPUContext();
  const gl = context.gl;
  tf.webgl.webgl_util.bindCanvasToFramebuffer(gl);
  gl.clearColor(1, 1, 1, 1);
  gl.clear(gl.COLOR_BUFFER_BIT);
}
// The backend's canvas element, promoted to a page-spanning canvas in
// initBackend().
let webGlCanvas;
/**
 * Make a page spanning canvas from the backend context.
 *
 * Looks up the tfjs 'webgl' backend, swaps its canvas into the DOM in
 * place of the #canvasContainer node, styles it to cover the viewport,
 * and sizes the drawing buffer to match. Sets the globals `backend`,
 * `webGlCanvas` and `maxSize`.
 */
function initBackend() {
backend = tf.ENV.findBackend('webgl');
// inject backend canvas for drawing
webGlCanvas = backend.getCanvas();
const bodyList = document.getElementsByTagName('body');
const body = bodyList[0];
// Find the position of the #canvasContainer placeholder among the
// body's child nodes so the backend canvas can replace it in place.
let offset = 0;
for (let node of body.childNodes) {
if (node.id === 'canvasContainer') {
break;
}
offset++;
}
body.replaceChild(webGlCanvas, body.childNodes[offset]);
webGlCanvas.id = "wholePageCanvas";
webGlCanvas.style = "width:100vw; height:100vh; margin-top: 0 !important; margin-left: 0 !important; position:absolute; top:0; display:block;";
let gl = backend.getGPGPUContext().gl;
// NOTE(review): EXT_float_blend is presumably needed for blending into
// float render targets during splatting — confirm against the shaders.
gl.getExtension('EXT_float_blend');
maxSize= gl.getParameter(gl.MAX_TEXTURE_SIZE);
// Match the drawing buffer to the CSS layout size.
gl.canvas.width = gl.canvas.offsetWidth;
gl.canvas.height = gl.canvas.offsetHeight;
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
clearBackendCanvas();
}
/**
 * Match the canvas drawing-buffer size to its CSS client size.
 * @param gl WebGL context whose canvas may need resizing
 */
function resizeCanvas(gl) {
  const targetWidth = gl.canvas.clientWidth;
  const targetHeight = gl.canvas.clientHeight;
  const sizeChanged =
      gl.canvas.width != targetWidth || gl.canvas.height != targetHeight;
  if (sizeChanged) {
    gl.canvas.width = targetWidth;
    gl.canvas.height = targetHeight;
  }
}
/**
 * Compute the on-canvas viewport rectangle for a DOM element.
 * @param element DOM element to locate
 * @param gl WebGL context providing the canvas client size
 * @returns [visible, width, height, left, bottom] where bottom is
 *     measured from the bottom of the canvas (GL convention); all
 *     dimensions are -1 when the element is off screen.
 */
function getLimits(element, gl) {
  const rect = element.getBoundingClientRect();
  const offScreen =
      rect.bottom < 0 || rect.top > gl.canvas.clientHeight ||
      rect.right < 0 || rect.left > gl.canvas.clientWidth;
  if (offScreen) {
    return [false, -1, -1, -1, -1];
  }
  return [
    true,
    rect.right - rect.left,
    rect.bottom - rect.top,
    rect.left,
    gl.canvas.clientHeight - rect.bottom - 1,
  ];
}
/**
 * Draw the R/G/B channel splat textures into their placeholder
 * elements, skipping channels that have no data yet or whose elements
 * are scrolled off screen.
 *
 * Fix: removed the local `count` variable, which was incremented but
 * never read (dead code).
 */
function displayTextures() {
  const gl = backend.getGPGPUContext().gl;
  const textureIds = ['textureR', 'textureG', 'textureB'];
  textureIds.forEach((id, idx) => {
    const plotObject = displayObjects[id];
    const data = plotObject.data;
    if (!data) {
      // nothing to render for this channel yet
      return;
    }
    const [OK, width, height, left, bottom] =
        getLimits(plotObject.element, gl);
    if (!OK) {
      return; // element is outside the visible canvas
    }
    executeTextureDisplayProgram(
        backend.getGPGPUContext(), textureDisplayProgram, textureDisplayUniforms,
        data.splatTexture, data.normalizeTex, data.numPoints, idx,
        width, height, left, bottom);
  });
}
// Timestamp (ms) of the most recent scatter-plot render; used to
// throttle drawing to roughly 5 frames per second.
let lastRenderTime = 0;
// tsne iteration number at the last offscreen point render; avoids
// re-rendering an unchanged embedding.
let lastRenderItern = 0;
/**
 * Display the embedding as a scatter plot.
 *
 * Reads its inputs from the displayObjects['scatterPlot'] entry, which
 * is refreshed each tsne iteration by runAndDisplayTsne.
 *
 * @param now frame timestamp (ms) used to throttle rendering.
 *     NOTE(review): displayIterInfo calls this without an argument, so
 *     `now` is undefined and (now - lastRenderTime) is NaN — the
 *     "< 200" throttle below never fires in that path; confirm the
 *     intended call site passes the requestAnimationFrame timestamp.
 */
function displayScatterPlot(now) {
if (!enableViz) {
return;
}
const gpgpu = backend.getGPGPUContext();
const gl = gpgpu.gl;
resizeCanvas(gl);
// Keep the fixed, page-spanning canvas aligned with the scroll offset.
webGlCanvas.style.transform = `translateY(${window.scrollY}px)`;
const plotObject = displayObjects['scatterPlot'];
const data = plotObject.data;
if (!data) {
// nothing to render
return;
}
const tsneIterElement = document.getElementById('tsneIterCount');
tsneIterElement.innerHTML = 'tsne iteration: ' + data.iteration;
// limit to 5 frames per sec
if ((now - lastRenderTime) < 200) {
return;
}
lastRenderTime = now;
clearBackground(gl);
let OK, width, height, left, bottom;
[OK, width, height, left, bottom] = getLimits(plotObject.element, gl);
if (!OK) {
return;
}
// Save the gpgpu program so tfjs state can be restored after our own
// GL programs have run.
const oldProgram = gpgpu.program;
if (data.iteration !== lastRenderItern) {
lastRenderItern = data.iteration;
// Render the embedding points offscreen along with a hit texture.
executeOffscreenPointRender(
backend.getGPGPUContext(), pointToTexProgram, pointToTexUniforms, data.coords,
width, height, data.numPoints, data.pointsPerRow,
data.minX, data.minY, data.maxX, data.maxY);
}
// Blit the offscreen plot texture onto the visible canvas quad.
executeRenderTextureToScreenQuad(
backend.getGPGPUContext(), textureToQuadProgram, textureToQuadUniforms,
plot_tex, width, height, left, bottom
);
displayTextures();
// Restore tfjs's GL program and vertex attribute state.
if (oldProgram != null) {
gpgpu.setProgram(oldProgram);
tf.webgl.gpgpu_util.bindVertexProgramAttributeStreams(
gl, oldProgram, gpgpu.vertexBuffer);
}
};
/**
 * Expand an array of packed 0xRRGGBB integer colors into a
 * Float32Array of normalized r,g,b components (3 floats per color,
 * each in [0, 1]).
 * @param colArray array of packed integer colors
 * @returns {Float32Array} flat component array
 */
function colToFloatComp(colArray) {
  const components = new Float32Array(colArray.length * 3);
  colArray.forEach((packed, i) => {
    components[i * 3] = ((packed >> 16) & 0xFF) / 255;
    components[i * 3 + 1] = ((packed >> 8) & 0xFF) / 255;
    components[i * 3 + 2] = (packed & 0xFF) / 255;
  });
  return components;
}
/**
 * Run tf-tsne on the MNIST and plot the data points
 * in a simple interactive canvas.
 *
 * Reads the point count and max-kNN-iteration sliders from the DOM,
 * subsets the preshuffled MNIST data, assigns one fixed color per
 * digit class, then runs kNN followed by up to 1000 tsne iterations.
 * @returns {Promise<void>}
 */
async function runTsne() {
cancel = false;
// The MNIST set is preshuffled just load it.
const allMnistTensor = tf.tensor(dataSet).reshape([65000, 784]);
const numberPoints = parseInt(document.getElementById('numPointsSlider').value);
// subset and downsample the images (28x28 in, 28x28 out: no actual
// downsampling here, just the numberPoints subset)
const subTensor = subsampleTensorImages(allMnistTensor,
28,
28,
28,
28,
numberPoints);
allMnistTensor.dispose();
// match the number of labels to the points subset
const subLabels = labelSet.slice(0, numberPoints);
// One distinct color per digit class 0-9.
const labelColors = colToFloatComp([0xFF0000,
0xFF9900,
0xCCFF00,
0x33FF00,
0x00FF66,
0x00FFFF,
0x0066FF,
0x3300FF,
0xCC00FF,
0xFF0099]);
// Build a per-point rgb array by looking up each point's class color.
const colors = new Float32Array(numberPoints * 3);
subLabels.forEach((val, idx) => {
colors [idx * 3] = labelColors[val * 3];
colors [idx * 3 + 1] = labelColors[val * 3 + 1];
colors [idx * 3 + 2] = labelColors[val * 3 + 2];
});
initBuffers(numberPoints, colors);
console.log(`calculating on: ${subTensor.shape}`);
// For very small point counts scale the perplexity down so it stays
// well below the number of points.
const perplexity = numberPoints < 240 ? Math.floor(numberPoints/8) : 30;
const tsneOpt = tf_tsne.tsne(subTensor, {
perplexity : perplexity,
verbose : true,
knnMode : 'auto',
});
const maxKnnIters = document.getElementById('kNNSlider').value;
const knnIterations = Math.min(tsneOpt.knnIterations(), maxKnnIters);
await runAndDisplayKnn(tsneOpt, knnIterations);
await runAndDisplayTsne(tsneOpt, 1000, numberPoints);
cancel = true;
console.log(`Tsne done`);
tf.dispose(subTensor);
tsneOpt.optimizer.dispose();
}
/**
 * Run up to nIters kNN iterations one step at a time, updating the
 * status and iteration counters and yielding to the browser between
 * steps. Visualization is enabled after the first iteration.
 * @param tsneOpt tf_tsne instance
 * @param nIters number of kNN iterations to run
 */
async function runAndDisplayKnn(tsneOpt, nIters) {
  console.log('started kNN');
  const statusElement = displayObjects['status'].element;
  statusElement.innerHTML = '...running kNN';
  await sleep(1);
  for (let iter = 1; iter <= nIters; iter++) {
    await tsneOpt.iterateKnn(1);
    displayObjects['knnIter'].element.innerHTML = 'knn iteration: ' + iter;
    if (iter === 1) {
      enableViz = true;
    }
    await sleep(1);
  }
  statusElement.innerHTML = '';
}
/**
 * Support globals for texture copying (see initTextureCopy and
 * makeTextureCopy).
 */
// Destination framebuffer for the blit copy.
let dstFbo;
// Source framebuffer for the blit copy.
let srcFbo;
// Latest copy of the tsne splat texture, kept so it can be displayed
// asynchronously while the optimizer recreates its own texture.
let splatTexCopy;
// Clone of the embedding tensor taken each iteration so rendering can
// safely lag behind the optimizer.
let embeddingClone = null;
/**
 * Create the source and destination framebuffer objects once.
 * Sets the srcFbo and dstFbo globals.
 */
function initTextureCopy() {
  const context = backend.getGPGPUContext();
  const gl = context.gl;
  dstFbo = gl.createFramebuffer();
  srcFbo = gl.createFramebuffer();
}
/**
 * Blit-copy the source texture and return the copy.
 * Uses the srcFbo and dstFbo framebuffers created by initTextureCopy.
 * @param srcTexture texture to copy
 * @param width copy region width in pixels
 * @param height copy region height in pixels
 * @returns {WebGLTexture} a newly created texture holding the copy
 */
function makeTextureCopy(srcTexture, width, height) {
let gl = backend.getGPGPUContext().gl;
// Attach the source texture to the read framebuffer.
gl.bindFramebuffer(gl.FRAMEBUFFER, srcFbo);
gl.framebufferTexture2D(gl.READ_FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, srcTexture, 0);
// Create the destination texture and attach it to the draw framebuffer.
gl.bindFramebuffer(gl.FRAMEBUFFER, dstFbo);
const dstTexture = gl_util.createAndConfigureTexture(
gl,
width,
height,
4, null);
gl.framebufferTexture2D(gl.DRAW_FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, dstTexture, 0);
gl.bindFramebuffer ( gl.DRAW_FRAMEBUFFER, dstFbo );
gl.bindFramebuffer ( gl.READ_FRAMEBUFFER, srcFbo );
// GPU-side copy of the color attachment; NEAREST is exact for a
// same-size blit.
gl.blitFramebuffer(0, 0, width, height, 0, 0, width, height,
gl.COLOR_BUFFER_BIT, gl.NEAREST);
// Restore the default framebuffer binding.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
return dstTexture;
}
/**
 * Wrap tsne in an async iterator to decouple from the display.
 *
 * Each iteration runs one tsne step, snapshots the splat texture and
 * the embedding (both are recreated/mutated by the optimizer), and
 * yields everything the renderer needs. Stops early when the global
 * `cancel` flag is set.
 * @param tsneOpt
 * @param nIterations
 * @param numPoints
 * @returns {AsyncIterableIterator<{iteration: number, numPoints: *, coords: WebGLTexture, pointsPerRow: number, minX: number, maxX: number, minY: number, maxY: number, splatTexture: WebGLTexture, splatDiameter}>}
 */
async function* tsneResultGenerator(tsneOpt, nIterations, numPoints) {
let count = 0;
displayObjects['status'].element.innerHTML = '...running tSNE';
while (count < nIterations) {
await tsneOpt.iterate(1);
if (count === 0) {
// Lazily create the blit framebuffers on the first iteration.
initTextureCopy();
}
// Copy the splat texture in order to display it asynchronously.
// It is constantly recreated in the tsne algorithm.
if (splatTexCopy) {
const gl = backend.getGPGPUContext().gl;
gl.deleteTexture(splatTexCopy);
splatTexCopy = null;
}
splatTexCopy = makeTextureCopy(
tsneOpt.optimizer.splatTexture,
tsneOpt.optimizer.splatTextureDiameter,
tsneOpt.optimizer.splatTextureDiameter);
// Clone the embedding so the renderer reads a stable texture while
// the optimizer keeps mutating its own.
if (embeddingClone) {
embeddingClone.dispose();
embeddingClone = null;
}
embeddingClone = tf.clone(tsneOpt.optimizer.embedding);
lastRenderTime = 0; // force render
yield {
iteration: count + 1,
numPoints: numPoints,
coords: backend.getTexture(embeddingClone.dataId),
pointsPerRow: tsneOpt.optimizer.numberOfPointsPerRow,
minX: tsneOpt.optimizer.minX,
maxX: tsneOpt.optimizer.maxX,
minY: tsneOpt.optimizer.minY,
maxY: tsneOpt.optimizer.maxY,
splatTexture: splatTexCopy,
diameter: tsneOpt.optimizer.splatTextureDiameter,
normalizeTex: Math.sqrt(tsneOpt.optimizer.normalizationQ) * tsneOpt.optimizer.exaggerationAtCurrentIteration
};
if (cancel) {
cancel = false;
tsneOpt.optimizer.dispose();
break;
}
count++;
}
displayObjects['status'].element.innerHTML = '';
}
/**
 * requestAnimationFrame loop that repaints the scatter plot each frame.
 * @param now DOMHighResTimeStamp supplied by requestAnimationFrame.
 */
function displayIterInfo(now) {
  requestAnimationFrame(displayIterInfo);
  // Fix: forward the frame timestamp. The original called
  // displayScatterPlot() with no argument, so its
  // (now - lastRenderTime) < 200 throttle compared against undefined
  // (NaN) and the intended 5 fps cap never engaged.
  displayScatterPlot(now);
}
/**
 * Start the render loop, then feed each tsne iteration's results to
 * every display object that consumes them.
 * @param tsneOpt tf_tsne instance
 * @param nIterations maximum number of tsne iterations
 * @param numPoints number of embedded points
 */
async function runAndDisplayTsne(tsneOpt, nIterations, numPoints) {
  window.requestAnimationFrame(displayIterInfo);
  console.log('started RAF');
  const consumers = ['scatterPlot', 'textureR', 'textureG', 'textureB'];
  for await (const iterInfo of tsneResultGenerator(tsneOpt, nIterations, numPoints)) {
    for (const key of consumers) {
      displayObjects[key].data = iterInfo;
    }
  }
}
/**
 * Render a single 28x28 MNIST digit onto the given canvas context,
 * scaled to fill the canvas (dark digit on a white background).
 * @param digitCtx 2d canvas context to draw into
 * @param digitData 784 grayscale values in [0, 1]
 */
async function digitOnCanvas(digitCtx, digitData) {
  const height = digitCtx.canvas.height;
  // Fix: width previously read digitCtx.canvas.height, which only
  // worked because the digit canvas happens to be square.
  const width = digitCtx.canvas.width;
  const dataPix = blankCanvas(digitCtx);
  const imgData = dataPix.imgData;
  const pixArray = dataPix.pixArray;
  // put the digit data in a tensor and resize it
  // to match the canvas
  const imgTensor = tf.tensor4d(digitData, [1, 28, 28, 1]);
  const resizedTensor = imgTensor.resizeNearestNeighbor([height, width]);
  const resizedArray = await resizedTensor.data();
  // Write inverted grayscale into RGB; alpha is already opaque from
  // blankCanvas.
  resizedArray.forEach((val, idx) => {
    const pixOffset = 4 * idx;
    const pixVal = 255 - (255 * val);
    pixArray[pixOffset] = pixVal;
    pixArray[pixOffset + 1] = pixVal;
    pixArray[pixOffset + 2] = pixVal;
  });
  digitCtx.putImageData(imgData, 0, 0);
  // NOTE(review): imgTensor and resizedTensor are never disposed —
  // likely a tensor leak; confirm whether tf.tidy/dispose should be
  // added (kept as-is to avoid changing behavior beyond the bug fix).
}
/**
 * Handle the mousemove event to explore the points in the
 * plot canvas: sample the hit texture under the cursor and, if a
 * point is hit, draw the corresponding MNIST digit.
 * @param plotCtx plot DOM element (despite the name, an element, not a
 *     2d context — getBoundingClientRect is called on it)
 * @param digitCtx 2d context of the digit preview canvas
 * @param e mousemove event
 */
function plotExplore(plotCtx, digitCtx, e) {
const rect = plotCtx.getBoundingClientRect();
const x = e.clientX - rect.left;
const y = e.clientY - rect.top;
// Flip y for GL coordinates. NOTE(review): 511 presumably equals
// (hit texture height - 1) for a fixed 512px plot — confirm; this
// breaks if the plot size changes.
const id = executeHitSampleProgram(x, 511-y);
if (id < 1) {
// id 0/negative: background, no point under the cursor
return;
}
// ids are 1-based; slice out the 784 pixels of the hit digit.
const digitData = dataSet.slice((id-1)*784, (id)*784);
digitOnCanvas(digitCtx, digitData);
}
/**
 * Cancel the current run, clear state, and start a fresh tsne run.
 */
function restart() {
  cancel = true;
  enableViz = false;
  clearOffscreenState();
  // Give the in-flight iteration loop a moment to observe `cancel`
  // before reinitializing.
  setTimeout(async () => {
    initCanvas();
    await runTsne();
  }, 1000);
}
/** Request cancellation of the running kNN/tsne loops. */
function stop() {
  cancel = true;
}
/** Refresh the numeric label next to the point-count slider. */
function updatePoints() {
  const nPoints = parseInt(document.getElementById('numPointsSlider').value);
  const label = 'num MNIST points: ' + nPoints.toString().padStart(6, '\u2002');
  document.getElementById('pntSliderVal').innerHTML = label;
}
// Page entry point: set up the GL backend and canvases, download the
// MNIST data and labels, then wire up the slider/button/mouse handlers.
window.onload = async function() {
initBackend();
const contexts = initCanvas();
const digitCtx = contexts.digitCanvCtx;
updatePoints();
initPlotPrograms();
displayObjects['status'].element.innerHTML = '...downloading MNIST data';
dataSet = await loadMnist();
const labelOneHot = new Uint8Array(await loadMnistLabels());
labelSet = oneHotToIndex(labelOneHot);
displayObjects['status'].element.innerHTML = '';
// Keep the kNN-iteration slider label in sync.
document.getElementById('kNNSlider').oninput = () => {
document.getElementById('sliderVal').innerHTML = 'max kNN iterations: ' + document.getElementById('kNNSlider').value;
};
document.getElementById('numPointsSlider').oninput = updatePoints;
// Hovering the plot shows the digit under the cursor.
const plotCtx = document.getElementById('scatterPlot');
plotCtx.addEventListener('mousemove', plotExplore.bind(null, plotCtx, digitCtx));
document.getElementById('restartButton').addEventListener('click', restart);
document.getElementById('stopButton').addEventListener('click', stop);
};
|
tensorflow/tfjs-tsne
|
examples/mnist_data/index.js
|
JavaScript
|
apache-2.0
| 34,521
|
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.captcha.internal;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.service.component.ComponentContext;
import org.wso2.carbon.identity.application.authentication.framework.AuthenticationDataPublisher;
import org.wso2.carbon.identity.captcha.connector.CaptchaConnector;
import org.wso2.carbon.identity.captcha.connector.recaptcha.PasswordRecoveryReCaptchaConnector;
import org.wso2.carbon.identity.captcha.connector.recaptcha.ResendConfirmationReCaptchaConnector;
import org.wso2.carbon.identity.captcha.connector.recaptcha.SSOLoginReCaptchaConfig;
import org.wso2.carbon.identity.captcha.connector.recaptcha.SelfSignUpReCaptchaConnector;
import org.wso2.carbon.identity.captcha.connector.recaptcha.UsernameRecoveryReCaptchaConnector;
import org.wso2.carbon.identity.captcha.util.CaptchaUtil;
import org.wso2.carbon.identity.captcha.validator.FailLoginAttemptValidationHandler;
import org.wso2.carbon.identity.captcha.validator.FailLoginAttemptValidator;
import org.wso2.carbon.identity.event.handler.AbstractEventHandler;
import org.wso2.carbon.identity.governance.IdentityGovernanceService;
import org.wso2.carbon.identity.governance.common.IdentityConnectorConfig;
import org.wso2.carbon.identity.handler.event.account.lock.service.AccountLockService;
import org.wso2.carbon.user.core.service.RealmService;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.osgi.service.component.annotations.ReferencePolicy;
@Component(
name = "org.wso2.carbon.identity.captcha.internal.CaptchaComponent",
immediate = true)
/**
 * OSGi declarative-services component that bootstraps reCaptcha support:
 * builds the captcha filter properties, registers the built-in captcha
 * connectors, and wires required services into {@link CaptchaDataHolder}.
 */
public class CaptchaComponent {
private static final Log log = LogFactory.getLog(CaptchaComponent.class);
/**
 * Initializes reCaptcha configuration and registers all built-in
 * connectors plus the failed-login-attempt publisher/handler.
 */
@Activate
protected void activate(ComponentContext context) {
try {
// Initialize reCaptcha.
CaptchaUtil.buildReCaptchaFilterProperties();
// Initialize and register SSOLoginReCaptchaConfig.
IdentityConnectorConfig connector = new SSOLoginReCaptchaConfig();
((SSOLoginReCaptchaConfig) connector).init(CaptchaDataHolder.getInstance().getIdentityGovernanceService());
context.getBundleContext().registerService(IdentityConnectorConfig.class, connector, null);
CaptchaDataHolder.getInstance().addCaptchaConnector((SSOLoginReCaptchaConfig) connector);
// Initialize and register PathBasedReCaptchaConnector.
CaptchaConnector captchaConnector = new SelfSignUpReCaptchaConnector();
captchaConnector.init(CaptchaDataHolder.getInstance().getIdentityGovernanceService());
CaptchaDataHolder.getInstance().addCaptchaConnector(captchaConnector);
// Initialize and register UsernameRecoveryReCaptchaConnector.
captchaConnector = new UsernameRecoveryReCaptchaConnector();
captchaConnector.init(CaptchaDataHolder.getInstance().getIdentityGovernanceService());
CaptchaDataHolder.getInstance().addCaptchaConnector(captchaConnector);
// Initialize and register PasswordRecoveryReCaptchaConnector.
captchaConnector = new PasswordRecoveryReCaptchaConnector();
captchaConnector.init(CaptchaDataHolder.getInstance().getIdentityGovernanceService());
CaptchaDataHolder.getInstance().addCaptchaConnector(captchaConnector);
// Initialize and register ResendConfirmationReCaptchaConnector.
captchaConnector = new ResendConfirmationReCaptchaConnector();
captchaConnector.init(CaptchaDataHolder.getInstance().getIdentityGovernanceService());
CaptchaDataHolder.getInstance().addCaptchaConnector(captchaConnector);
AuthenticationDataPublisher failedLoginAttemptValidator = new FailLoginAttemptValidator();
context.getBundleContext().registerService(AuthenticationDataPublisher.class,
failedLoginAttemptValidator, null);
context.getBundleContext().registerService(AbstractEventHandler.class.getName(), new
FailLoginAttemptValidationHandler(), null);
if (log.isDebugEnabled()) {
log.debug("Captcha Component is activated");
}
} catch (Throwable e) {
// Catching Throwable so a captcha failure never blocks server startup.
log.error("Failed to start CaptchaComponent", e);
}
}
@Deactivate
protected void deactivate(ComponentContext context) {
if (log.isDebugEnabled()) {
log.debug("Captcha Component is de-activated");
}
}
// Dynamic reference: captcha connectors contributed by other bundles.
@Reference(
name = "CaptchaConnectors",
service = CaptchaConnector.class,
cardinality = ReferenceCardinality.MULTIPLE,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetCaptchaConnector")
protected void setCaptchaConnector(CaptchaConnector captchaConnector) {
CaptchaDataHolder.getInstance().addCaptchaConnector(captchaConnector);
}
protected void unsetCaptchaConnector(CaptchaConnector captchaConnector) {
CaptchaDataHolder.getInstance().getCaptchaConnectors().remove(captchaConnector);
}
@Reference(
name = "IdentityGovernanceConnectors",
service = org.wso2.carbon.identity.governance.common.IdentityConnectorConfig.class,
cardinality = ReferenceCardinality.MULTIPLE,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetIdentityGovernanceConnector")
protected void setIdentityGovernanceConnector(IdentityConnectorConfig identityConnectorConfig) {
// NOTE(review): this re-adds the connector only when it is ALREADY in
// the holder's list; a "!contains" guard would be the conventional
// de-duplication — confirm the intent before changing.
if (identityConnectorConfig instanceof CaptchaConnector && CaptchaDataHolder.getInstance()
.getCaptchaConnectors().contains(identityConnectorConfig)) {
CaptchaDataHolder.getInstance().addCaptchaConnector((CaptchaConnector) identityConnectorConfig);
}
}
protected void unsetIdentityGovernanceConnector(IdentityConnectorConfig identityConnectorConfig) {
if (identityConnectorConfig instanceof CaptchaConnector) {
CaptchaDataHolder.getInstance().getCaptchaConnectors().remove(identityConnectorConfig);
}
}
@Reference(
name = "IdentityGovernanceService",
service = org.wso2.carbon.identity.governance.IdentityGovernanceService.class,
cardinality = ReferenceCardinality.MANDATORY,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetIdentityGovernanceService")
protected void setIdentityGovernanceService(IdentityGovernanceService identityGovernanceService) {
CaptchaDataHolder.getInstance().setIdentityGovernanceService(identityGovernanceService);
}
protected void unsetIdentityGovernanceService(IdentityGovernanceService identityGovernanceService) {
CaptchaDataHolder.getInstance().setIdentityGovernanceService(null);
}
@Reference(
name = "RealmService",
service = org.wso2.carbon.user.core.service.RealmService.class,
cardinality = ReferenceCardinality.MANDATORY,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetRealmService")
protected void setRealmService(RealmService realmService) {
CaptchaDataHolder.getInstance().setRealmService(realmService);
}
protected void unsetRealmService(RealmService realmService) {
CaptchaDataHolder.getInstance().setRealmService(null);
}
@Reference(
name = "AccountLockService",
service = org.wso2.carbon.identity.handler.event.account.lock.service.AccountLockService.class,
cardinality = ReferenceCardinality.MANDATORY,
policy = ReferencePolicy.DYNAMIC,
unbind = "unsetAccountLockService")
protected void setAccountLockService(AccountLockService accountLockService) {
CaptchaDataHolder.getInstance().setAccountLockService(accountLockService);
}
protected void unsetAccountLockService(AccountLockService accountLockService) {
CaptchaDataHolder.getInstance().setAccountLockService(null);
}
}
|
wso2-extensions/identity-governance
|
components/org.wso2.carbon.identity.captcha/src/main/java/org/wso2/carbon/identity/captcha/internal/CaptchaComponent.java
|
Java
|
apache-2.0
| 8,981
|
// Mocha/Chai spec for the piechart sink view: parameter validation
// errors and runtime warning messages.
require('chai').should();
var viewTestUtils = require('./utils/view-test-utils');
var test_date = new Date();
describe('Pie Sink View', function () {
var PieView = require('../../src/views/piechart');
// Each case constructs the view with one invalid param and asserts
// the validation error path/code reported.
describe('invalid params', function() {
it('unknown top level field', function() {
viewTestUtils.verifyValidationError({
viewConstructor : PieView,
params : {
columnOrder : 'asdf'
},
errorPath : 'columnOrder',
error : {
'code' : 'UNKNOWN',
'info' : {}
}
});
});
it('non-string sliceLabels.valueFormat', function() {
viewTestUtils.verifyValidationError({
viewConstructor : PieView,
params : {
sliceLabels : {
valueFormat : 1
}
},
errorPath : 'sliceLabels.valueFormat',
error : {
'code' : 'INVALID_TYPE',
'info' : {
'type' : 'STRING'
}
}
});
});
it('non-string valueField', function() {
viewTestUtils.verifyValidationError({
viewConstructor : PieView,
params : {
valueField : []
},
errorPath : 'valueField',
error : {
'code' : 'INVALID_TYPE',
'info' : {
'type' : 'STRING'
}
}
});
});
it('non-string categoryField', function() {
viewTestUtils.verifyValidationError({
viewConstructor : PieView,
params : {
categoryField : {}
},
errorPath : 'categoryField',
error : {
'code' : 'INVALID_TYPE',
'info' : {
'type' : 'STRING'
}
}
});
});
// radiusInner must lie in [0, 100] (percent of the outer radius).
it('radiusInner too big', function() {
viewTestUtils.verifyValidationError({
viewConstructor : PieView,
params : {
radiusInner : 150
},
errorPath : 'radiusInner',
error : {
'code' : 'OUT_OF_RANGE',
'info' : {
'threshold' : 100,
'type' : 'LESS_THAN_OR_EQUAL'
}
}
});
});
it('radiusInner too small', function() {
viewTestUtils.verifyValidationError({
viewConstructor : PieView,
params : {
radiusInner : -1
},
errorPath : 'radiusInner',
error : {
'code' : 'OUT_OF_RANGE',
'info' : {
'threshold' : 0,
'type' : 'GREATER_THAN_OR_EQUAL'
}
}
});
});
});
// Messages the chart surfaces while consuming points at runtime; each
// case checks the message appears and clears once valid data arrives.
describe('Runtime Messages', function () {
it('waiting for data', function() {
var chart = new PieView({});
chart.setDimensions(null, 100, 100);
viewTestUtils.verifyRuntimeMessage(chart, 'WAITING_FOR_DATA');
chart.consume([
{ time : test_date, value : 1, pop : 'pop1' }
]);
viewTestUtils.verifyNoRuntimeMessages(chart);
});
it('all categories are 0', function() {
var chart = new PieView({});
chart.setDimensions(null, 100, 100);
chart.consume([
{ time : test_date, value : 0, pop : 'pop1' }
]);
viewTestUtils.verifyRuntimeMessage(chart, 'ALL_CATEGORIES_ARE_ZERO');
// A single non-zero category clears the message, and it stays
// cleared when later zero-valued categories arrive.
chart.consume([
{ time : test_date, value : 1, pop : 'pop2' }
]);
viewTestUtils.verifyNoRuntimeMessages(chart);
chart.consume([
{ time : test_date, value : 0, pop : 'pop3' }
]);
viewTestUtils.verifyNoRuntimeMessages(chart);
});
it('No Data Received', function() {
var chart = new PieView({});
chart.setDimensions(null, 100, 100);
chart.consume_eof();
viewTestUtils.verifyRuntimeMessage(chart, 'NO_DATA_RECEIVED');
});
it('Value field does not exist', function() {
// valueField 'v' is absent from the points, so every value is
// non-numeric from the chart's perspective.
var chart = new PieView({
params : {
valueField : 'v',
categoryField : 'host'
}
});
chart.setDimensions(null, 100, 100);
chart.consume([
{ time: test_date, host: 'host1', value: 1 },
{ time: test_date, host: 'host2', value: 1 },
{ time: test_date, host: 'host3', value: 1 },
{ time: test_date, host: 'host4', value: 1 },
{ time: test_date, host: 'host5', value: 1 }
]);
viewTestUtils.verifyRuntimeMessage(chart, 'VALUE_FIELD_NON_NUMERIC', {
valueField : 'v'
});
});
it('doesn\'t complain about timeless points', function() {
var chart = new PieView({});
chart.setDimensions(null, 200, 200);
chart.consume([
{
category: 'A',
value: 1
},
{
category: 'B',
value: 1
}
]);
viewTestUtils.verifyNoRuntimeMessages(chart);
});
});
});
|
juttle/juttle-viz
|
test/views/piechart.spec.js
|
JavaScript
|
apache-2.0
| 5,920
|
package ca.uhn.fhir.jpa.dao;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2016 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.annotation.PostConstruct;
import javax.persistence.NoResultException;
import javax.persistence.TemporalType;
import javax.persistence.TypedQuery;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseMetaType;
import org.hl7.fhir.instance.model.api.IBaseOperationOutcome;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.BaseTag;
import ca.uhn.fhir.jpa.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.interceptor.IJpaServerInterceptor;
import ca.uhn.fhir.jpa.util.DeleteConflict;
import ca.uhn.fhir.jpa.util.StopWatch;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.method.RequestDetails;
import ca.uhn.fhir.rest.method.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.server.IBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor.ActionRequestDetails;
import ca.uhn.fhir.util.FhirTerser;
import ca.uhn.fhir.util.ObjectUtil;
@Transactional(propagation = Propagation.REQUIRED)
public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends BaseHapiFhirDao<T> implements IFhirResourceDao<T> {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseHapiFhirResourceDao.class);
@Autowired
private DaoConfig myDaoConfig;
@Autowired
protected PlatformTransactionManager myPlatformTransactionManager;
// FHIR resource type name served by this DAO (e.g. "Patient").
private String myResourceName;
// Concrete resource class served by this DAO.
private Class<T> myResourceType;
// Optional full-text search DAO; absent when search indexing is disabled.
@Autowired(required = false)
protected ISearchDao mySearchDao;
@Autowired()
protected ISearchResultDao mySearchResultDao;
@Autowired()
protected IResourceIndexedSearchParamUriDao myResourceIndexedSearchParamUriDao;
// NOTE(review): purpose not visible in this chunk — presumably an
// alternate key search-param name; confirm against the full class.
private String mySecondaryPrimaryKeyParamName;
/**
 * Adds a tag to the given resource, persisting the association.
 * A no-op when a tag with the same type/system/code is already present.
 *
 * @throws ResourceNotFoundException when no entity exists for theId
 */
@Override
public void addTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm, String theLabel) {
StopWatch w = new StopWatch();
BaseHasResource entity = readEntity(theId);
if (entity == null) {
throw new ResourceNotFoundException(theId);
}
//@formatter:off
// Duplicate check: bail out if an equivalent tag is already attached.
for (BaseTag next : new ArrayList<BaseTag>(entity.getTags())) {
if (ObjectUtil.equals(next.getTag().getTagType(), theTagType) &&
ObjectUtil.equals(next.getTag().getSystem(), theScheme) &&
ObjectUtil.equals(next.getTag().getCode(), theTerm)) {
return;
}
}
//@formatter:on
entity.setHasTags(true);
// NOTE(review): the new tag is always created with TagTypeEnum.TAG,
// ignoring the theTagType parameter used in the duplicate check above
// — confirm whether theTagType should be passed here instead.
TagDefinition def = getTag(TagTypeEnum.TAG, theScheme, theTerm, theLabel);
BaseTag newEntity = entity.addTag(def);
myEntityManager.persist(newEntity);
myEntityManager.merge(entity);
ourLog.info("Processed addTag {}/{} on {} in {}ms", new Object[] { theScheme, theTerm, theId, w.getMillisAndRestart() });
}
/**
 * Creates the resource with indexing enabled and no conditional-create
 * URL; delegates to {@link #create(IBaseResource, String, boolean, RequestDetails)}.
 */
@Override
public DaoMethodOutcome create(final T theResource, RequestDetails theRequestDetails) {
return create(theResource, null, true, theRequestDetails);
}
/**
 * Conditional create (If-None-Exist) with indexing enabled; delegates to
 * {@link #create(IBaseResource, String, boolean, RequestDetails)}.
 */
@Override
public DaoMethodOutcome create(final T theResource, String theIfNoneExist, RequestDetails theRequestDetails) {
return create(theResource, theIfNoneExist, true, theRequestDetails);
}
/**
 * Creates the resource, rejecting client-assigned IDs: DSTU1 only
 * forbids numeric client IDs, later FHIR versions forbid any
 * client-assigned ID on create (update must be used instead).
 *
 * @throws InvalidRequestException when the resource carries a
 *     disallowed client-assigned ID
 */
@Override
public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, RequestDetails theRequestDetails) {
if (isNotBlank(theResource.getIdElement().getIdPart())) {
if (getContext().getVersion().getVersion().equals(FhirVersionEnum.DSTU1)) {
if (theResource.getIdElement().isIdPartValidLong()) {
String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "failedToCreateWithClientAssignedNumericId", theResource.getIdElement().getIdPart());
throw new InvalidRequestException(message, createErrorOperationOutcome(message, "processing"));
}
} else {
String message = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "failedToCreateWithClientAssignedId", theResource.getIdElement().getIdPart());
throw new InvalidRequestException(message, createErrorOperationOutcome(message, "processing"));
}
}
return doCreate(theResource, theIfNoneExist, thePerformIndexing, new Date(), theRequestDetails);
}
public IBaseOperationOutcome createErrorOperationOutcome(String theMessage, String theCode) {
return createOperationOutcome(OO_SEVERITY_ERROR, theMessage, theCode);
}
/**
 * Builds an OperationOutcome with "information" severity and the "informational" issue code.
 */
public IBaseOperationOutcome createInfoOperationOutcome(String theMessage) {
return createOperationOutcome(OO_SEVERITY_INFO, theMessage, "informational");
}
/**
 * Creates a version-appropriate OperationOutcome carrying a single issue with the
 * given severity, message, and issue code. Implemented by version-specific subclasses.
 */
protected abstract IBaseOperationOutcome createOperationOutcome(String theSeverity, String theMessage, String theCode);
/**
 * Deletes the resource with the given ID, failing with an exception if any referential
 * integrity conflicts (incoming references) are detected.
 *
 * @throws ResourceVersionConflictException (via validateDeleteConflictsEmptyOrThrowException)
 *         if other resources still reference the target — assumed from the validator's name;
 *         confirm against its implementation
 */
@Override
public DaoMethodOutcome delete(IIdType theId, RequestDetails theRequestDetails) {
List<DeleteConflict> deleteConflicts = new ArrayList<DeleteConflict>();
StopWatch w = new StopWatch();
ResourceTable savedEntity = delete(theId, deleteConflicts, theRequestDetails);
validateDeleteConflictsEmptyOrThrowException(deleteConflicts);
ourLog.info("Processed delete on {} in {}ms", theId.getValue(), w.getMillisAndRestart());
// No resource body is associated with a delete outcome
return toMethodOutcome(savedEntity, null);
}
/**
 * Deletes the resource with the given ID, collecting (rather than throwing on)
 * referential integrity conflicts into {@code deleteConflicts}.
 * <p>
 * Order matters here: conflicts are collected and interceptors notified before the
 * entity is updated, and JPA interceptors are notified only after the delete is stored.
 *
 * @throws InvalidRequestException if no ID is provided, or a version-specific ID does
 *         not match the current version
 */
@Override
public ResourceTable delete(IIdType theId, List<DeleteConflict> deleteConflicts, RequestDetails theRequestDetails) {
if (theId == null || !theId.hasIdPart()) {
throw new InvalidRequestException("Can not perform delete, no ID provided");
}
final ResourceTable entity = readEntityLatestVersion(theId);
// Deleting a specific version is only allowed when it is the current version
if (theId.hasVersionIdPart() && Long.parseLong(theId.getVersionIdPart()) != entity.getVersion()) {
throw new InvalidRequestException("Trying to delete " + theId + " but this is not the current version");
}
validateOkToDelete(deleteConflicts, entity);
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theId, theId.getResourceType(), getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.DELETE, requestDetails);
Date updateTime = new Date();
// Passing a null resource marks the entity as deleted at updateTime
ResourceTable savedEntity = updateEntity(null, entity, true, updateTime, updateTime, theRequestDetails);
// Notify JPA interceptors
for (IServerInterceptor next : getConfig().getInterceptors()) {
if (next instanceof IJpaServerInterceptor) {
((IJpaServerInterceptor) next).resourceDeleted(requestDetails, entity);
}
}
return savedEntity;
}
/**
 * Conditional delete: deletes all resources matching the given search URL, failing if
 * conflicts are found or if nothing matched.
 *
 * @throws ResourceNotFoundException if the URL matched no resources
 */
@Override
public DaoMethodOutcome deleteByUrl(String theUrl, RequestDetails theRequestDetails) {
StopWatch w = new StopWatch();
List<DeleteConflict> deleteConflicts = new ArrayList<DeleteConflict>();
List<ResourceTable> deletedResources = deleteByUrl(theUrl, deleteConflicts, theRequestDetails);
validateDeleteConflictsEmptyOrThrowException(deleteConflicts);
if (deletedResources.isEmpty()) {
throw new ResourceNotFoundException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "unableToDeleteNotFound", theUrl));
}
ourLog.info("Processed delete on {} (matched {} resource(s)) in {}ms", new Object[] { theUrl, deletedResources.size(), w.getMillisAndRestart() });
// Conditional deletes return an empty outcome (no single ID/resource applies)
return new DaoMethodOutcome();
}
/**
 * Conditional delete worker: resolves the search URL to PIDs and deletes each match,
 * collecting referential-integrity conflicts into {@code deleteConflicts}.
 * <p>
 * Multiple matches are only permitted when the DAO configuration allows multiple
 * delete; otherwise a 412 Precondition Failed is raised.
 *
 * @return the entities that were deleted (possibly empty if nothing matched)
 * @throws PreconditionFailedException if more than one resource matched and multiple
 *         delete is disabled
 */
@Override
public List<ResourceTable> deleteByUrl(String theUrl, List<DeleteConflict> deleteConflicts, RequestDetails theRequestDetails) {
	Set<Long> resource = processMatchUrl(theUrl, myResourceType);
	if (resource.size() > 1 && !myDaoConfig.isAllowMultipleDelete()) {
		throw new PreconditionFailedException(getContext().getLocalizer().getMessage(BaseHapiFhirDao.class, "transactionOperationWithMultipleMatchFailure", "DELETE", theUrl, resource.size()));
	}
	List<ResourceTable> retVal = new ArrayList<ResourceTable>();
	for (Long pid : resource) {
		ResourceTable entity = myEntityManager.find(ResourceTable.class, pid);
		retVal.add(entity);
		validateOkToDelete(deleteConflicts, entity);
		// Notify interceptors
		IdDt idToDelete = entity.getIdDt();
		ActionRequestDetails requestDetails = new ActionRequestDetails(idToDelete, idToDelete.getResourceType(), getContext(), theRequestDetails);
		notifyInterceptors(RestOperationTypeEnum.DELETE, requestDetails);
		// Perform delete: a null resource marks the entity deleted at updateTime
		Date updateTime = new Date();
		updateEntity(null, entity, true, updateTime, updateTime, theRequestDetails);
		// Notify JPA interceptors after the delete has been stored
		for (IServerInterceptor next : getConfig().getInterceptors()) {
			if (next instanceof IJpaServerInterceptor) {
				((IJpaServerInterceptor) next).resourceDeleted(requestDetails, entity);
			}
		}
	}
	return retVal;
}
/**
 * Core create implementation: handles conditional create (If-None-Exist), forced
 * (client-assigned non-numeric) IDs, interceptor notification, and persistence.
 * <p>
 * The control flow is order-sensitive: the conditional-create match short-circuits
 * before ID handling, and the forced-ID duplicate check deliberately relies on
 * {@link ResourceNotFoundException} to signal "ID is free".
 */
private DaoMethodOutcome doCreate(T theResource, String theIfNoneExist, boolean thePerformIndexing, Date theUpdateTime, RequestDetails theRequestDetails) {
StopWatch w = new StopWatch();
preProcessResourceForStorage(theResource);
ResourceTable entity = new ResourceTable();
entity.setResourceType(toResourceName(theResource));
if (isNotBlank(theIfNoneExist)) {
Set<Long> match = processMatchUrl(theIfNoneExist, myResourceType);
if (match.size() > 1) {
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirDao.class, "transactionOperationWithMultipleMatchFailure", "CREATE", theIfNoneExist, match.size());
throw new PreconditionFailedException(msg);
} else if (match.size() == 1) {
// Exactly one existing match: conditional create is a no-op, return it
Long pid = match.iterator().next();
entity = myEntityManager.find(ResourceTable.class, pid);
return toMethodOutcome(entity, theResource).setCreated(false);
}
}
if (isNotBlank(theResource.getIdElement().getIdPart())) {
if (isValidPid(theResource.getIdElement())) {
throw new UnprocessableEntityException("This server cannot create an entity with a user-specified numeric ID - Client should not specify an ID when creating a new resource, or should include at least one letter in the ID to force a client-defined ID");
}
createForcedIdIfNeeded(entity, theResource.getIdElement());
if (entity.getForcedId() != null) {
try {
// If this lookup succeeds, the forced ID is already taken
translateForcedIdToPid(theResource.getIdElement());
throw new UnprocessableEntityException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "duplicateCreateForcedId", theResource.getIdElement().getIdPart()));
} catch (ResourceNotFoundException e) {
// good, this ID doesn't exist so we can create it
}
}
}
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theResource.getIdElement(), toResourceName(theResource), theResource, getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.CREATE, requestDetails);
// Perform actual DB update
updateEntity(theResource, entity, false, null, thePerformIndexing, true, theUpdateTime, theRequestDetails);
// Notify JPA interceptors
for (IServerInterceptor next : getConfig().getInterceptors()) {
if (next instanceof IJpaServerInterceptor) {
((IJpaServerInterceptor) next).resourceCreated(requestDetails, entity);
}
}
DaoMethodOutcome outcome = toMethodOutcome(entity, theResource).setCreated(true);
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
outcome.setOperationOutcome(createInfoOperationOutcome(msg));
ourLog.info(msg);
return outcome;
}
/**
 * Returns all tags used across all stored resources of this DAO's type
 * (legacy DSTU1-style GET_TAGS operation).
 */
@Override
public TagList getAllResourceTags(RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(null, null, getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.GET_TAGS, requestDetails);
StopWatch w = new StopWatch();
// Null resource ID means "all resources of this type"
TagList tags = super.getTags(myResourceType, null);
ourLog.info("Processed getTags on {} in {}ms", myResourceName, w.getMillisAndRestart());
return tags;
}
/**
 * Extracts the element values selected by the given _include expression from a
 * resource. Implemented by version-specific subclasses.
 */
protected abstract List<Object> getIncludeValues(FhirTerser theTerser, Include theInclude, IBaseResource theResource, RuntimeResourceDefinition theResourceDef);
/** Returns the FHIR resource type name handled by this DAO (resolved in postConstruct). */
public String getResourceName() {
return myResourceName;
}
/** Returns the resource class handled by this DAO. */
@Override
public Class<T> getResourceType() {
return myResourceType;
}
/**
 * Returns all tags on the resource with the given ID
 * (legacy DSTU1-style GET_TAGS operation).
 */
@Override
public TagList getTags(IIdType theResourceId, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theResourceId, null, getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.GET_TAGS, requestDetails);
StopWatch w = new StopWatch();
TagList retVal = super.getTags(myResourceType, theResourceId);
ourLog.info("Processed getTags on {} in {}ms", theResourceId, w.getMillisAndRestart());
return retVal;
}
/**
 * Type-level history: returns version history for all resources of this type,
 * optionally limited to changes since the given date.
 */
@Override
public IBundleProvider history(Date theSince, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(null, null, getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.HISTORY_SYSTEM, requestDetails);
StopWatch w = new StopWatch();
IBundleProvider retVal = super.history(myResourceName, null, theSince);
ourLog.info("Processed history on {} in {}ms", myResourceName, w.getMillisAndRestart());
return retVal;
}
/**
 * Instance-level history: returns all versions of the resource with the given ID as a
 * lazily paged {@link IBundleProvider}.
 * <p>
 * The current (latest) version is loaded eagerly and prepended at index 0 of the
 * result; the historical versions are counted up-front and then fetched page-by-page
 * from ResourceHistoryTable as the bundle provider is consumed. All versions are
 * bounded by an "end" timestamp captured when this method starts, so rows written
 * after the call do not appear.
 *
 * @throws ResourceNotFoundException if no current version and no history rows exist
 */
@Override
public IBundleProvider history(final IIdType theId, final Date theSince, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theId, getResourceName(), getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.HISTORY_INSTANCE, requestDetails);
final InstantDt end = createHistoryToTimestamp();
final String resourceType = getContext().getResourceDefinition(myResourceType).getName();
// Try to load the current version; it is excluded when it was updated after "end"
// or when the resource no longer exists
T currentTmp;
try {
BaseHasResource entity = readEntity(theId.toVersionless(), false);
validateResourceType(entity);
currentTmp = toResource(myResourceType, entity, true);
Date lastUpdated;
if (currentTmp instanceof IResource) {
lastUpdated = ResourceMetadataKeyEnum.UPDATED.get((IResource) currentTmp).getValue();
} else {
lastUpdated = ((IAnyResource) currentTmp).getMeta().getLastUpdated();
}
if (lastUpdated.after(end.getValue())) {
currentTmp = null;
}
} catch (ResourceNotFoundException e) {
currentTmp = null;
}
final T current = currentTmp;
// Count matching history rows (NOTE: non-conventional local names "B"/"querySring"
// are preserved here — rename candidates for a code change, not a doc pass)
StringBuilder B = new StringBuilder();
B.append("SELECT count(h) FROM ResourceHistoryTable h ");
B.append("WHERE h.myResourceId = :PID AND h.myResourceType = :RESTYPE");
B.append(" AND h.myUpdated < :END");
B.append((theSince != null ? " AND h.myUpdated >= :SINCE" : ""));
String querySring = B.toString();
TypedQuery<Long> countQuery = myEntityManager.createQuery(querySring, Long.class);
countQuery.setParameter("PID", translateForcedIdToPid(theId));
countQuery.setParameter("RESTYPE", resourceType);
countQuery.setParameter("END", end.getValue(), TemporalType.TIMESTAMP);
if (theSince != null) {
countQuery.setParameter("SINCE", theSince, TemporalType.TIMESTAMP);
}
int historyCount = countQuery.getSingleResult().intValue();
// The current version occupies slot 0, so history paging is offset by 1 when present
final int offset;
final int count;
if (current != null) {
count = historyCount + 1;
offset = 1;
} else {
offset = 0;
count = historyCount;
}
if (count == 0) {
throw new ResourceNotFoundException(theId);
}
return new IBundleProvider() {
@Override
public InstantDt getPublished() {
return end;
}
@Override
public List<IBaseResource> getResources(int theFromIndex, int theToIndex) {
List<IBaseResource> retVal = new ArrayList<IBaseResource>();
// Slot 0 is always the current version (when one exists)
if (theFromIndex == 0 && current != null) {
retVal.add(current);
}
StringBuilder b = new StringBuilder();
b.append("SELECT h FROM ResourceHistoryTable h WHERE h.myResourceId = :PID AND h.myResourceType = :RESTYPE AND h.myUpdated < :END ");
b.append((theSince != null ? " AND h.myUpdated >= :SINCE" : ""));
b.append(" ORDER BY h.myUpdated DESC");
TypedQuery<ResourceHistoryTable> q = myEntityManager.createQuery(b.toString(), ResourceHistoryTable.class);
q.setParameter("PID", translateForcedIdToPid(theId));
q.setParameter("RESTYPE", resourceType);
q.setParameter("END", end.getValue(), TemporalType.TIMESTAMP);
if (theSince != null) {
q.setParameter("SINCE", theSince, TemporalType.TIMESTAMP);
}
// Shift the DB page window left by one when the current version fills slot 0
int firstResult = Math.max(0, theFromIndex - offset);
q.setFirstResult(firstResult);
int maxResults = (theToIndex - theFromIndex) + 1;
q.setMaxResults(maxResults);
List<ResourceHistoryTable> results = q.getResultList();
for (ResourceHistoryTable next : results) {
if (retVal.size() == maxResults) {
break;
}
retVal.add(toResource(myResourceType, next, true));
}
return retVal;
}
@Override
public Integer preferredPageSize() {
return null;
}
@Override
public int size() {
return count;
}
};
}
/**
 * History lookup by internal PID, optionally limited to changes since the given date.
 */
@Override
public IBundleProvider history(Long theId, Date theSince, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(null, getResourceName(), getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.HISTORY_TYPE, requestDetails);
StopWatch w = new StopWatch();
IBundleProvider retVal = super.history(myResourceName, theId, theSince);
ourLog.info("Processed history on {} in {}ms", theId, w.getMillisAndRestart());
return retVal;
}
/**
 * $meta-add operation: merges the tags/security labels/profiles in {@code theMetaAdd}
 * into the resource's existing tags, skipping entries that are already present
 * (matched on tag type + system + code), then returns the resulting meta.
 *
 * @throws ResourceNotFoundException if the resource does not exist
 */
@Override
public <MT extends IBaseMetaType> MT metaAddOperation(IIdType theResourceId, MT theMetaAdd, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theResourceId, getResourceName(), getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.META_ADD, requestDetails);
StopWatch w = new StopWatch();
BaseHasResource entity = readEntity(theResourceId);
if (entity == null) {
throw new ResourceNotFoundException(theResourceId);
}
List<TagDefinition> tags = toTagList(theMetaAdd);
//@formatter:off
for (TagDefinition nextDef : tags) {
boolean hasTag = false;
// Copy the tag collection so the persist below can't disturb iteration
for (BaseTag next : new ArrayList<BaseTag>(entity.getTags())) {
if (ObjectUtil.equals(next.getTag().getTagType(), nextDef.getTagType()) &&
ObjectUtil.equals(next.getTag().getSystem(), nextDef.getSystem()) &&
ObjectUtil.equals(next.getTag().getCode(), nextDef.getCode())) {
hasTag = true;
break;
}
}
if (!hasTag) {
entity.setHasTags(true);
TagDefinition def = getTag(nextDef.getTagType(), nextDef.getSystem(), nextDef.getCode(), nextDef.getDisplay());
BaseTag newEntity = entity.addTag(def);
myEntityManager.persist(newEntity);
}
}
//@formatter:on
myEntityManager.merge(entity);
ourLog.info("Processed metaAddOperation on {} in {}ms", new Object[] { theResourceId, w.getMillisAndRestart() });
@SuppressWarnings("unchecked")
MT retVal = (MT) metaGetOperation(theMetaAdd.getClass(), theResourceId, theRequestDetails);
return retVal;
}
// @Override
// public IBundleProvider everything(IIdType theId) {
// Search search = new Search();
// search.setUuid(UUID.randomUUID().toString());
// search.setCreated(new Date());
// myEntityManager.persist(search);
//
// List<SearchResult> results = new ArrayList<SearchResult>();
// if (theId != null) {
// Long pid = translateForcedIdToPid(theId);
// ResourceTable entity = myEntityManager.find(ResourceTable.class, pid);
// validateGivenIdIsAppropriateToRetrieveResource(theId, entity);
// SearchResult res = new SearchResult(search);
// res.setResourcePid(pid);
// results.add(res);
// } else {
// TypedQuery<Tuple> query = createSearchAllByTypeQuery();
// for (Tuple next : query.getResultList()) {
// SearchResult res = new SearchResult(search);
// res.setResourcePid(next.get(0, Long.class));
// results.add(res);
// }
// }
//
// int totalCount = results.size();
// mySearchResultDao.save(results);
// mySearchResultDao.flush();
//
// CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
//
// // Load _revincludes
// CriteriaQuery<Long> cq = builder.createQuery(Long.class);
// Root<ResourceLink> from = cq.from(ResourceLink.class);
// cq.select(from.get("mySourceResourcePid").as(Long.class));
//
// Subquery<Long> pidsSubquery = cq.subquery(Long.class);
// Root<SearchResult> pidsSubqueryFrom = pidsSubquery.from(SearchResult.class);
// pidsSubquery.select(pidsSubqueryFrom.get("myResourcePid").as(Long.class));
// pidsSubquery.where(pidsSubqueryFrom.get("mySearch").in(search));
//
// cq.where(from.get("myTargetResourceId").in(pidsSubquery));
// TypedQuery<Long> query = myEntityManager.createQuery(cq);
//
// results = new ArrayList<SearchResult>();
// for (Long next : query.getResultList()) {
// SearchResult res = new SearchResult(search);
// res.setResourcePid(next);
// results.add(res);
// }
//
// // Save _revincludes
// totalCount += results.size();
// mySearchResultDao.save(results);
// mySearchResultDao.flush();
//
// final int finalTotalCount = totalCount;
// return new IBundleProvider() {
//
// @Override
// public int size() {
// return finalTotalCount;
// }
//
// @Override
// public Integer preferredPageSize() {
// return null;
// }
//
// @Override
// public List<IBaseResource> getResources(int theFromIndex, int theToIndex) {
// // TODO Auto-generated method stub
// return null;
// }
//
// @Override
// public InstantDt getPublished() {
// // TODO Auto-generated method stub
// return null;
// }
// };
// }
/**
 * $meta-delete operation: removes any of the resource's tags that match (by tag type +
 * system + code) an entry in {@code theMetaDel}, clears the hasTags flag if nothing is
 * left, and returns the resulting meta.
 *
 * @throws ResourceNotFoundException if the resource does not exist
 */
@Override
public <MT extends IBaseMetaType> MT metaDeleteOperation(IIdType theResourceId, MT theMetaDel, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theResourceId, getResourceName(), getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.META_DELETE, requestDetails);
StopWatch w = new StopWatch();
BaseHasResource entity = readEntity(theResourceId);
if (entity == null) {
throw new ResourceNotFoundException(theResourceId);
}
List<TagDefinition> tags = toTagList(theMetaDel);
//@formatter:off
for (TagDefinition nextDef : tags) {
// Iterate over a copy so entity.getTags().remove(next) is safe mid-loop
for (BaseTag next : new ArrayList<BaseTag>(entity.getTags())) {
if (ObjectUtil.equals(next.getTag().getTagType(), nextDef.getTagType()) &&
ObjectUtil.equals(next.getTag().getSystem(), nextDef.getSystem()) &&
ObjectUtil.equals(next.getTag().getCode(), nextDef.getCode())) {
myEntityManager.remove(next);
entity.getTags().remove(next);
}
}
}
//@formatter:on
if (entity.getTags().isEmpty()) {
entity.setHasTags(false);
}
myEntityManager.merge(entity);
// Flush so the subsequent metaGetOperation sees the removals
myEntityManager.flush();
ourLog.info("Processed metaDeleteOperation on {} in {}ms", new Object[] { theResourceId.getValue(), w.getMillisAndRestart() });
@SuppressWarnings("unchecked")
MT retVal = (MT) metaGetOperation(theMetaDel.getClass(), theResourceId, theRequestDetails);
return retVal;
}
/**
 * Type-level $meta operation: returns the distinct set of tag definitions used by any
 * stored resource of this DAO's type.
 */
@Override
public <MT extends IBaseMetaType> MT metaGetOperation(Class<MT> theType, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(null, getResourceName(), getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.META, requestDetails);
String sql = "SELECT d FROM TagDefinition d WHERE d.myId IN (SELECT DISTINCT t.myTagId FROM ResourceTag t WHERE t.myResourceType = :res_type)";
TypedQuery<TagDefinition> q = myEntityManager.createQuery(sql, TagDefinition.class);
q.setParameter("res_type", myResourceName);
List<TagDefinition> tagDefinitions = q.getResultList();
MT retVal = toMetaDt(theType, tagDefinitions);
return retVal;
}
/**
 * Converts a collection of stored tag definitions into a freshly instantiated meta
 * element of the requested concrete type, dispatching each definition to the
 * appropriate meta bucket (profile, security label, or tag).
 *
 * @param theType the concrete meta type to instantiate; must have a no-arg constructor
 * @throws InternalErrorException if the meta type cannot be instantiated
 */
protected <MT extends IBaseMetaType> MT toMetaDt(Class<MT> theType, Collection<TagDefinition> tagDefinitions) {
	MT retVal;
	try {
		// Use the explicit no-arg Constructor rather than Class#newInstance(): the
		// latter propagates checked constructor exceptions undeclared and is
		// deprecated in later Java releases. Any reflective failure is still
		// wrapped by the catch below.
		retVal = theType.getDeclaredConstructor().newInstance();
	} catch (Exception e) {
		throw new InternalErrorException("Failed to instantiate " + theType.getName(), e);
	}
	for (TagDefinition next : tagDefinitions) {
		switch (next.getTagType()) {
		case PROFILE:
			// Profiles carry only a canonical URL (stored in the code column)
			retVal.addProfile(next.getCode());
			break;
		case SECURITY_LABEL:
			retVal.addSecurity().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay());
			break;
		case TAG:
			retVal.addTag().setSystem(next.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay());
			break;
		}
	}
	return retVal;
}
/**
 * Instance-level $meta operation: returns the distinct tags of a single resource,
 * plus its lastUpdated timestamp and version ID.
 */
@Override
public <MT extends IBaseMetaType> MT metaGetOperation(Class<MT> theType, IIdType theId, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theId, getResourceName(), getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.META, requestDetails);
// Use a set to de-duplicate tag definitions shared across versions
Set<TagDefinition> tagDefs = new HashSet<TagDefinition>();
BaseHasResource entity = readEntity(theId);
for (BaseTag next : entity.getTags()) {
tagDefs.add(next.getTag());
}
MT retVal = toMetaDt(theType, tagDefs);
retVal.setLastUpdated(entity.getUpdatedDate());
retVal.setVersionId(Long.toString(entity.getVersion()));
return retVal;
}
/**
 * Post-construction initialization: resolves the resource type name from the FHIR
 * context and validates the optional secondary-primary-key search parameter.
 *
 * @throws ConfigurationException if the configured secondary key parameter is unknown
 *         or is not a token-type parameter
 */
@PostConstruct
public void postConstruct() {
RuntimeResourceDefinition def = getContext().getResourceDefinition(myResourceType);
myResourceName = def.getName();
if (mySecondaryPrimaryKeyParamName != null) {
RuntimeSearchParam sp = def.getSearchParam(mySecondaryPrimaryKeyParamName);
if (sp == null) {
throw new ConfigurationException("Unknown search param on resource[" + myResourceName + "] for secondary key[" + mySecondaryPrimaryKeyParamName + "]");
}
if (sp.getParamType() != RestSearchParameterTypeEnum.TOKEN) {
throw new ConfigurationException("Search param on resource[" + myResourceName + "] for secondary key[" + mySecondaryPrimaryKeyParamName + "] is not a token type, only token is supported");
}
}
}
/**
 * Validates a resource prior to storage; may be overridden by subclasses to add
 * version-specific validation.
 *
 * @param theResource
 *            The resource that is about to be stored
 * @throws InvalidRequestException if the resource's type does not match this DAO's
 *             type, or if its ID part is syntactically invalid
 */
protected void preProcessResourceForStorage(T theResource) {
String type = getContext().getResourceDefinition(theResource).getName();
if (!getResourceName().equals(type)) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "incorrectResourceType", type, getResourceName()));
}
if (theResource.getIdElement().hasIdPart()) {
if (!theResource.getIdElement().isIdPartValid()) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "failedToCreateWithInvalidId", theResource.getIdElement().getIdPart()));
}
}
}
/**
 * Resolves a conditional-operation search URL to the matching PIDs for this DAO's
 * resource type.
 */
@Override
public Set<Long> processMatchUrl(String theMatchUrl) {
return processMatchUrl(theMatchUrl, getResourceType());
}
/**
 * Reads a resource by ID (vread when the ID carries a version part).
 *
 * @throws ResourceNotFoundException if the ID does not exist
 * @throws ResourceGoneException if the resource exists but has been deleted
 */
@Override
public T read(IIdType theId, RequestDetails theRequestDetails) {
validateResourceTypeAndThrowIllegalArgumentException(theId);
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theId, getResourceName(), getContext(), theRequestDetails);
RestOperationTypeEnum operationType = theId.hasVersionIdPart() ? RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ;
notifyInterceptors(operationType, requestDetails);
StopWatch w = new StopWatch();
BaseHasResource entity = readEntity(theId);
validateResourceType(entity);
T retVal = toResource(myResourceType, entity, false);
// The deletion marker lives in different metadata locations for DSTU1-style
// IResource vs RI-style IAnyResource
IPrimitiveType<Date> deleted;
if (retVal instanceof IResource) {
deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) retVal);
} else {
deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) retVal);
}
if (deleted != null && !deleted.isEmpty()) {
throw new ResourceGoneException("Resource was deleted at " + deleted.getValueAsString());
}
ourLog.info("Processed read on {} in {}ms", theId.getValue(), w.getMillisAndRestart());
return retVal;
}
/**
 * Loads the persistence entity for the given resource ID with forced-ID checking
 * enabled (the safe default for externally supplied IDs).
 */
@Override
public BaseHasResource readEntity(IIdType theId) {
	return readEntity(theId, true);
}
/**
 * Loads the persistence entity for the given resource ID. When the ID carries a
 * version part that is not the current version, falls back to looking the version up
 * in the history table.
 *
 * @param theCheckForForcedId whether to reject numeric IDs for resources that have a
 *        forced (client-assigned) ID
 * @throws ResourceNotFoundException if the ID, or the requested version, does not exist
 */
@Override
public BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId) {
validateResourceTypeAndThrowIllegalArgumentException(theId);
Long pid = translateForcedIdToPid(theId);
BaseHasResource entity = myEntityManager.find(ResourceTable.class, pid);
if (entity == null) {
throw new ResourceNotFoundException(theId);
}
if (theId.hasVersionIdPart()) {
if (theId.isVersionIdPartValidLong() == false) {
throw new ResourceNotFoundException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
}
// Requested version differs from current: null out so the history lookup below runs
if (entity.getVersion() != theId.getVersionIdPartAsLong().longValue()) {
entity = null;
}
}
if (entity == null) {
if (theId.hasVersionIdPart()) {
TypedQuery<ResourceHistoryTable> q = myEntityManager.createQuery("SELECT t from ResourceHistoryTable t WHERE t.myResourceId = :RID AND t.myResourceType = :RTYP AND t.myResourceVersion = :RVER", ResourceHistoryTable.class);
q.setParameter("RID", pid);
q.setParameter("RTYP", myResourceName);
q.setParameter("RVER", theId.getVersionIdPartAsLong());
try {
entity = q.getSingleResult();
} catch (NoResultException e) {
throw new ResourceNotFoundException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidVersion", theId.getVersionIdPart(), theId.toUnqualifiedVersionless()));
}
}
}
validateResourceType(entity);
if (theCheckForForcedId) {
validateGivenIdIsAppropriateToRetrieveResource(theId, entity);
}
return entity;
}
/**
 * Loads the current (latest-version) entity for the given ID, verifying that a
 * forced-ID resource was not addressed by its internal numeric PID.
 *
 * @throws ResourceNotFoundException if the ID does not exist
 */
protected ResourceTable readEntityLatestVersion(IIdType theId) {
ResourceTable entity = myEntityManager.find(ResourceTable.class, translateForcedIdToPid(theId));
if (entity == null) {
throw new ResourceNotFoundException(theId);
}
validateGivenIdIsAppropriateToRetrieveResource(theId, entity);
return entity;
}
/**
 * Re-runs search indexing for an existing entity without creating a new version or
 * changing its update timestamp.
 */
@Override
public void reindex(T theResource, ResourceTable theEntity, RequestDetails theRequestDetails) {
updateEntity(theResource, theEntity, false, null, true, false, theEntity.getUpdatedDate(), theRequestDetails);
}
/**
 * Legacy DSTU1-style tag removal: removes any tag on the resource matching the given
 * type + scheme + term, clearing the hasTags flag if none remain.
 *
 * @throws ResourceNotFoundException if the resource does not exist
 */
@Override
public void removeTag(IIdType theId, TagTypeEnum theTagType, String theScheme, String theTerm, RequestDetails theRequestDetails) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(theId, getResourceName(), getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.DELETE_TAGS, requestDetails);
StopWatch w = new StopWatch();
BaseHasResource entity = readEntity(theId);
if (entity == null) {
throw new ResourceNotFoundException(theId);
}
//@formatter:off
// Iterate over a copy so entity.getTags().remove(next) is safe mid-loop
for (BaseTag next : new ArrayList<BaseTag>(entity.getTags())) {
if (ObjectUtil.equals(next.getTag().getTagType(), theTagType) &&
ObjectUtil.equals(next.getTag().getSystem(), theScheme) &&
ObjectUtil.equals(next.getTag().getCode(), theTerm)) {
myEntityManager.remove(next);
entity.getTags().remove(next);
}
}
//@formatter:on
if (entity.getTags().isEmpty()) {
entity.setHasTags(false);
}
myEntityManager.merge(entity);
ourLog.info("Processed remove tag {}/{} on {} in {}ms", new Object[] { theScheme, theTerm, theId.getValue(), w.getMillisAndRestart() });
}
/**
 * Convenience search overload: copies a simple name-to-parameter map into a
 * {@link SearchParameterMap} and delegates to the full search implementation.
 */
@Override
public IBundleProvider search(Map<String, IQueryParameterType> theParams) {
	SearchParameterMap searchMap = new SearchParameterMap();
	for (Entry<String, IQueryParameterType> paramEntry : theParams.entrySet()) {
		searchMap.add(paramEntry.getKey(), paramEntry.getValue());
	}
	return search(searchMap);
}
/**
 * Full search: builds a SearchBuilder bound to this DAO's type and delegates query
 * construction and execution to it.
 */
@Override
public IBundleProvider search(final SearchParameterMap theParams) {
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(null, getResourceName(), getContext(), theParams.getRequestDetails());
notifyInterceptors(RestOperationTypeEnum.SEARCH_TYPE, requestDetails);
SearchBuilder builder = new SearchBuilder(getContext(), myEntityManager, myPlatformTransactionManager, mySearchDao, mySearchResultDao, this, myResourceIndexedSearchParamUriDao);
builder.setType(getResourceType(), getResourceName());
return builder.search(theParams);
}
/** Convenience search overload for a single parameter name/value pair. */
@Override
public IBundleProvider search(String theParameterName, IQueryParameterType theValue) {
return search(Collections.singletonMap(theParameterName, theValue));
}
/**
 * ID-only search: copies a simple name-to-parameter map into a
 * {@link SearchParameterMap} and returns matching PIDs without loading resources.
 */
@Override
public Set<Long> searchForIds(Map<String, IQueryParameterType> theParams) {
	SearchParameterMap searchMap = new SearchParameterMap();
	for (Entry<String, IQueryParameterType> paramEntry : theParams.entrySet()) {
		searchMap.add(paramEntry.getKey(), paramEntry.getValue());
	}
	return searchForIdsWithAndOr(searchMap, null, null);
}
/** ID-only search overload for a single parameter name/value pair. */
@Override
public Set<Long> searchForIds(String theParameterName, IQueryParameterType theValue) {
return searchForIds(Collections.singletonMap(theParameterName, theValue));
}
/**
 * ID-only search supporting full AND/OR parameter nesting, an optional seed PID set,
 * and an optional _lastUpdated range; delegates to SearchBuilder.
 */
@Override
public Set<Long> searchForIdsWithAndOr(SearchParameterMap theParams, Collection<Long> theInitialPids, DateRangeParam theLastUpdated) {
SearchBuilder builder = new SearchBuilder(getContext(), myEntityManager, myPlatformTransactionManager, mySearchDao, mySearchResultDao, this, myResourceIndexedSearchParamUriDao);
builder.setType(getResourceType(), getResourceName());
return builder.searchForIdsWithAndOr(theParams, theInitialPids, theLastUpdated);
}
/** Injects the resource class this DAO manages (required Spring property). */
@SuppressWarnings("unchecked")
@Required
public void setResourceType(Class<? extends IBaseResource> theTableType) {
myResourceType = (Class<T>) theTableType;
}
/**
 * If set, the given param will be treated as a secondary primary key, and multiple resources will not be able to
 * share the same value. The named parameter must exist on this resource type and be a
 * token-type parameter; this is enforced in {@code postConstruct()}.
 */
public void setSecondaryPrimaryKeyParamName(String theSecondaryPrimaryKeyParamName) {
mySecondaryPrimaryKeyParamName = theSecondaryPrimaryKeyParamName;
}
/**
 * Builds a DaoMethodOutcome from a persisted entity, synchronizing the resource's ID
 * and lastUpdated metadata with the stored values when a resource body is supplied.
 *
 * @param theResource may be null (e.g. for deletes); then only the ID is populated
 */
private DaoMethodOutcome toMethodOutcome(final BaseHasResource theEntity, IBaseResource theResource) {
DaoMethodOutcome outcome = new DaoMethodOutcome();
IIdType id = theEntity.getIdDt();
// RI (reference implementation) structures use IdType rather than IdDt
if (getContext().getVersion().getVersion().isRi()) {
id = new IdType(id.getValue());
}
outcome.setId(id);
outcome.setResource(theResource);
if (theResource != null) {
theResource.setId(id);
// lastUpdated lives in different metadata locations for IResource vs IAnyResource
if (theResource instanceof IResource) {
ResourceMetadataKeyEnum.UPDATED.put((IResource) theResource, theEntity.getUpdated());
} else {
IBaseMetaType meta = ((IAnyResource) theResource).getMeta();
meta.setLastUpdated(theEntity.getUpdatedDate());
}
}
return outcome;
}
/**
 * ResourceTable-specific outcome builder: delegates to the base overload and also
 * attaches the entity itself to the outcome.
 */
private DaoMethodOutcome toMethodOutcome(final ResourceTable theEntity, IBaseResource theResource) {
DaoMethodOutcome retVal = toMethodOutcome((BaseHasResource) theEntity, theResource);
retVal.setEntity(theEntity);
return retVal;
}
/**
 * Flattens a FHIR meta element into a list of {@link TagDefinition}s, covering plain
 * tags, security labels, and profile declarations.
 */
private ArrayList<TagDefinition> toTagList(IBaseMetaType theMeta) {
	ArrayList<TagDefinition> definitions = new ArrayList<TagDefinition>();
	for (IBaseCoding tagCoding : theMeta.getTag()) {
		definitions.add(new TagDefinition(TagTypeEnum.TAG, tagCoding.getSystem(), tagCoding.getCode(), tagCoding.getDisplay()));
	}
	for (IBaseCoding securityCoding : theMeta.getSecurity()) {
		definitions.add(new TagDefinition(TagTypeEnum.SECURITY_LABEL, securityCoding.getSystem(), securityCoding.getCode(), securityCoding.getDisplay()));
	}
	for (IPrimitiveType<String> profile : theMeta.getProfile()) {
		// Profiles have no system/display of their own; they are stored under the JPA profile namespace
		definitions.add(new TagDefinition(TagTypeEnum.PROFILE, BaseHapiFhirDao.NS_JPA_PROFILE, profile.getValue(), null));
	}
	return definitions;
}
/** Updates the given resource by its own ID (no conditional-update URL). */
@Override
public DaoMethodOutcome update(T theResource, RequestDetails theRequestDetails) {
return update(theResource, null, theRequestDetails);
}
/**
 * Updates the given resource, honoring an optional conditional-update match URL, with
 * indexing enabled.
 */
@Override
public DaoMethodOutcome update(T theResource, String theMatchUrl, RequestDetails theRequestDetails) {
return update(theResource, theMatchUrl, true, theRequestDetails);
}
/**
 * Full update implementation: supports plain update by ID, conditional update by match
 * URL (falling back to create when nothing matches), and update-as-create for
 * non-existent non-numeric IDs.
 *
 * @throws PreconditionFailedException if the match URL matched more than one resource
 * @throws InvalidRequestException if no ID is present, the version does not match, or
 *         a numeric ID does not exist (update-as-create with numeric IDs is rejected)
 * @throws UnprocessableEntityException if the ID's resource type does not match this DAO
 */
@Override
public DaoMethodOutcome update(T theResource, String theMatchUrl, boolean thePerformIndexing, RequestDetails theRequestDetails) {
StopWatch w = new StopWatch();
preProcessResourceForStorage(theResource);
final ResourceTable entity;
IIdType resourceId;
if (isNotBlank(theMatchUrl)) {
// Conditional update: resolve the match URL to a target
Set<Long> match = processMatchUrl(theMatchUrl, myResourceType);
if (match.size() > 1) {
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirDao.class, "transactionOperationWithMultipleMatchFailure", "UPDATE", theMatchUrl, match.size());
throw new PreconditionFailedException(msg);
} else if (match.size() == 1) {
Long pid = match.iterator().next();
entity = myEntityManager.find(ResourceTable.class, pid);
resourceId = entity.getIdDt();
} else {
// No match: conditional update becomes a create
return create(theResource, null, thePerformIndexing, theRequestDetails);
}
} else {
resourceId = theResource.getIdElement();
if (resourceId == null || isBlank(resourceId.getIdPart())) {
throw new InvalidRequestException("Can not update a resource with no ID");
}
try {
entity = readEntityLatestVersion(resourceId);
} catch (ResourceNotFoundException e) {
if (resourceId.isIdPartValidLong()) {
throw new InvalidRequestException(getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "failedToCreateWithClientAssignedNumericId", theResource.getIdElement().getIdPart()));
}
// Update-as-create for a previously unseen non-numeric (forced) ID
return doCreate(theResource, null, thePerformIndexing, new Date(), theRequestDetails);
}
}
// Updating a specific version is only allowed when it is the current version
if (resourceId.hasVersionIdPart() && Long.parseLong(resourceId.getVersionIdPart()) != entity.getVersion()) {
throw new InvalidRequestException("Trying to update " + resourceId + " but this is not the current version");
}
if (resourceId.hasResourceType() && !resourceId.getResourceType().equals(getResourceName())) {
throw new UnprocessableEntityException("Invalid resource ID[" + entity.getIdDt().toUnqualifiedVersionless() + "] of type[" + entity.getResourceType() + "] - Does not match expected [" + getResourceName() + "]");
}
// Notify interceptors
ActionRequestDetails requestDetails = new ActionRequestDetails(resourceId, getResourceName(), theResource, getContext(), theRequestDetails);
notifyInterceptors(RestOperationTypeEnum.UPDATE, requestDetails);
// Perform update
ResourceTable savedEntity = updateEntity(theResource, entity, true, null, thePerformIndexing, true, new Date(), theRequestDetails);
// Notify JPA interceptors
for (IServerInterceptor next : getConfig().getInterceptors()) {
if (next instanceof IJpaServerInterceptor) {
((IJpaServerInterceptor) next).resourceUpdated(requestDetails, entity);
}
}
DaoMethodOutcome outcome = toMethodOutcome(savedEntity, theResource).setCreated(false);
// NOTE(review): this reuses the "successfulCreate" localizer key for updates —
// looks like either the key is generic or this is a copy/paste; confirm the key's
// text before renaming
String msg = getContext().getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
outcome.setOperationOutcome(createInfoOperationOutcome(msg));
ourLog.info(msg);
return outcome;
}
/**
 * Rejects a read by internal numeric ID for resources that carry a "forced" (client-assigned)
 * ID. Such resources are only addressable by their forced ID; the numeric PID is an internal
 * pointer and must look nonexistent to the outside world.
 *
 * @throws ResourceNotFoundException if the entity has a forced ID but the request used the numeric form
 */
private void validateGivenIdIsAppropriateToRetrieveResource(IIdType theId, BaseHasResource entity) {
	boolean hasForcedId = entity.getForcedId() != null;
	boolean requestedByNumericId = theId.isIdPartValidLong();
	if (hasForcedId && requestedByNumericId) {
		throw new ResourceNotFoundException(theId);
	}
}
/**
 * Checks whether any other resource still references {@code theEntity}. If at least one
 * inbound {@code ResourceLink} exists, a single {@link DeleteConflict} describing the first
 * such link is appended to {@code theDeleteConflicts}; otherwise the list is left untouched.
 * Only one conflict is reported because the query is capped at one row.
 */
protected void validateOkToDelete(List<DeleteConflict> theDeleteConflicts, ResourceTable theEntity) {
	TypedQuery<ResourceLink> query = myEntityManager.createQuery("SELECT l FROM ResourceLink l WHERE l.myTargetResourcePid = :target_pid", ResourceLink.class);
	query.setParameter("target_pid", theEntity.getId());
	// One inbound reference is enough to block the delete, so don't fetch more.
	query.setMaxResults(1);
	for (ResourceLink link : query.getResultList()) {
		IdDt sourceId = link.getSourceResource().getIdDt();
		String sourcePath = link.getSourcePath();
		IdDt targetId = theEntity.getIdDt();
		theDeleteConflicts.add(new DeleteConflict(sourceId, sourcePath, targetId));
		return;
	}
}
/**
 * Convenience overload: validates that the entity's resource type matches this DAO's
 * resource type ({@code myResourceName}).
 */
private void validateResourceType(BaseHasResource entity) {
	validateResourceType(entity, myResourceName);
}
/**
 * Verifies that an explicitly typed ID matches the resource type served by this DAO.
 * IDs with no type component are accepted as-is.
 *
 * @throws IllegalArgumentException if the ID names a different resource type
 */
private void validateResourceTypeAndThrowIllegalArgumentException(IIdType theId) {
	if (!theId.hasResourceType()) {
		return;
	}
	if (theId.getResourceType().equals(myResourceName)) {
		return;
	}
	throw new IllegalArgumentException("Incorrect resource type (" + theId.getResourceType() + ") for this DAO, wanted: " + myResourceName);
}
}
|
steve1medix/hapi-fhir
|
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirResourceDao.java
|
Java
|
apache-2.0
| 43,306
|
/*
Copyright 2010-2013 SourceGear, LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//////////////////////////////////////////////////////////////////
#include <sg.h>
#include "sg_wc__public_typedefs.h"
#include "sg_wc__public_prototypes.h"
#include "sg_wc__private.h"
#include "sg_vv2__public_typedefs.h"
#include "sg_vv2__public_prototypes.h"
#include "sg_vv2__private.h"
//////////////////////////////////////////////////////////////////
// Typed forward declaration so the function below matches the
// SG_varray_foreach_callback signature expected by SG_varray__foreach.
static SG_varray_foreach_callback _diff_to_stream_cb;

/**
 * Per-item callback for SG_varray__foreach over a status varray.
 * Pulls the item's status flags out of the nested "status" vhash and
 * dispatches to the type-specific diff routine (file/directory/symlink).
 * Errors are recorded in pCtx via the SG_ERR_CHECK goto-fail convention.
 */
static void _diff_to_stream_cb(SG_context * pCtx,
							   void * pVoidData,
							   const SG_varray * pvaStatus,
							   SG_uint32 k,
							   const SG_variant * pv)
{
	sg_vv6diff__diff_to_stream_data * pData = (sg_vv6diff__diff_to_stream_data *)pVoidData;
	SG_vhash * pvhItem;			// we do not own this
	SG_vhash * pvhItemStatus;	// we do not own this
	SG_int64 i64;
	SG_wc_status_flags statusFlags;

	SG_UNUSED( pvaStatus );
	SG_UNUSED( k );

	SG_ERR_CHECK( SG_variant__get__vhash(pCtx, pv, &pvhItem) );
	SG_ERR_CHECK( SG_vhash__get__vhash(pCtx, pvhItem, "status", &pvhItemStatus) );
	SG_ERR_CHECK( SG_vhash__get__int64(pCtx, pvhItemStatus, "flags", &i64) );
	statusFlags = (SG_wc_status_flags)i64;

	// Dispatch on the item-type bits only; other flag bits are passed through.
	switch (statusFlags & SG_WC_STATUS_FLAGS__T__MASK)
	{
	case SG_WC_STATUS_FLAGS__T__FILE:
		SG_ERR_CHECK( sg_vv2__diff__file(pCtx, pData, pvhItem, statusFlags) );
		break;
	case SG_WC_STATUS_FLAGS__T__DIRECTORY:
		SG_ERR_CHECK( sg_vv2__diff__directory(pCtx, pData, pvhItem, statusFlags) );
		break;
	case SG_WC_STATUS_FLAGS__T__SYMLINK:
		SG_ERR_CHECK( sg_vv2__diff__symlink(pCtx, pData, pvhItem, statusFlags) );
		break;
	// case SG_WC_STATUS_FLAGS__T__SUBREPO:
	default:
		// Unknown/unsupported item type (e.g. subrepos are not handled yet).
		SG_ERR_THROW( SG_ERR_NOTIMPLEMENTED );
	}
fail:
	return;
}
/**
 * Diffs every item in the given status varray, streaming output to the
 * console or launching an interactive difftool per item.
 *
 * pszRepoName may be NULL/empty, in which case the repo is taken from the
 * current working copy.  ppvhResultCodes is optional; when non-NULL it
 * receives a vhash of per-file difftool result codes (caller owns it).
 * All cleanup happens at the shared fail: label, on success and error alike.
 */
void sg_vv2__diff__diff_to_stream(SG_context * pCtx,
								  const char * pszRepoName,
								  const SG_varray * pvaStatus,
								  SG_bool bInteractive,
								  const char * pszTool,
								  SG_vhash ** ppvhResultCodes)
{
	sg_vv6diff__diff_to_stream_data data;
	SG_vhash * pvhWcInfo = NULL;
	SG_bool bGivenRepoName;

	// ppvhResultCodes is optional.

	memset(&data, 0, sizeof(data));

	bGivenRepoName = (pszRepoName && *pszRepoName);
	if (!bGivenRepoName)
	{
		// If they didn't give us a RepoName, try to get it from the WD.
		SG_wc__get_wc_info(pCtx, NULL, &pvhWcInfo);
		if (SG_CONTEXT__HAS_ERR(pCtx))
			SG_ERR_RETHROW2( (pCtx, "Use 'repo' option or be in a working copy.") );
		SG_ERR_CHECK( SG_vhash__get__sz(pCtx, pvhWcInfo, "repo", &pszRepoName) );
	}

	SG_ERR_CHECK( SG_REPO__OPEN_REPO_INSTANCE(pCtx, pszRepoName, &data.pRepo) );

	data.pPathSessionTempDir = NULL; // defer until needed
	data.pszTool = pszTool;
	data.bInteractive = bInteractive;
	if (bInteractive)
		data.pszDiffToolContext = SG_DIFFTOOL__CONTEXT__GUI;
	else
		data.pszDiffToolContext = SG_DIFFTOOL__CONTEXT__CONSOLE;
	data.pszSubsectionLeft = SG_WC__STATUS_SUBSECTION__A; // must match values in sg_vv2__status.c
	data.pszSubsectionRight = SG_WC__STATUS_SUBSECTION__B;

	if (ppvhResultCodes)
	{
		// The caller wants to know if we were able to launch
		// a difftool. However, we take a VARRAY of items to
		// compare -- rather than a single item. So we must
		// build a container to return the individual tool
		// results on each file.
		SG_ERR_CHECK( SG_VHASH__ALLOC(pCtx, &data.pvhResultCodes) );
	}

	SG_ERR_CHECK( SG_varray__foreach(pCtx, pvaStatus, _diff_to_stream_cb, &data) );

	if (ppvhResultCodes)
	{
		// Transfer ownership of the result vhash to the caller so the
		// NULLFREE below (shared with the error path) doesn't free it.
		*ppvhResultCodes = data.pvhResultCodes;
		data.pvhResultCodes = NULL;
	}

fail:
	SG_VHASH_NULLFREE(pCtx, pvhWcInfo);
	SG_REPO_NULLFREE(pCtx, data.pRepo);
	if (data.pPathSessionTempDir)
	{
		// we may or may not be able to delete the tmp dir (they may be visiting it in another window)
		// so we have to allow this to fail and not mess up the real context.
		SG_ERR_IGNORE( SG_fsobj__rmdir_recursive__pathname(pCtx, data.pPathSessionTempDir) );
		SG_PATHNAME_NULLFREE(pCtx, data.pPathSessionTempDir);
	}
	SG_VHASH_NULLFREE(pCtx, data.pvhResultCodes);
}
/**
 * Writes a diff section header to stdout.  A no-op in interactive (GUI
 * difftool) mode, where console headers would be meaningless.
 */
void sg_vv2__diff__print_header_on_console(SG_context * pCtx,
										   sg_vv6diff__diff_to_stream_data * pData,
										   const SG_string * pStringHeader)
{
	if (pData->bInteractive)
		return;

	SG_ERR_CHECK( SG_console__raw(pCtx, SG_CS_STDOUT, SG_string__sz(pStringHeader)) );
fail:
	return;
}
|
glycerine/vj
|
src/veracity/src/libraries/vv2/vv6diff/sg_vv2__diff__diff_to_stream.c
|
C
|
apache-2.0
| 4,838
|
/*
Copyright 2007,2008,2009,2010 Rustici Software, LLC
Copyright 2010,2011 Kevin Glynn (kevin.glynn@twigletsoftware.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author(s):
Kevin Glynn (kevin.glynn@twigletsoftware.com)
*/
package CS2JNet.System.Web.Services.Protocols;
import java.io.InputStream;
import CS2JNet.System.NotImplementedException;
/**
 * Minimal stand-in for .NET's {@code System.Web.Services.Protocols.SoapMessage}.
 * Stream access has not been ported; the accessor always fails.
 */
public class SoapMessage {

    /**
     * Not yet implemented.
     *
     * @return never returns normally
     * @throws NotImplementedException always
     */
    public InputStream getStream() throws NotImplementedException {
        throw new NotImplementedException("SoapMessage.getStream()");
    }
}
|
datancoffee/sirocco
|
src/main/java/CS2JNet/System/Web/Services/Protocols/SoapMessage.java
|
Java
|
apache-2.0
| 1,087
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.mvc;
import com.intellij.ProjectTopics;
import com.intellij.codeInsight.actions.ReformatCodeProcessor;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.components.AbstractProjectComponent;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.util.ProgressIndicatorUtils;
import com.intellij.openapi.progress.util.ReadTask;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.ModuleListener;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.SimpleModificationTracker;
import com.intellij.openapi.util.Trinity;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.impl.BulkVirtualFileListenerAdapter;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowAnchor;
import com.intellij.openapi.wm.ToolWindowEP;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.ui.GuiUtils;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.plugins.groovy.mvc.projectView.MvcToolWindowDescriptor;
import java.util.*;
import java.util.concurrent.ExecutorService;
/**
 * Keeps the project structure of MVC-framework modules (Grails/Griffon) in sync with
 * root changes, module additions, and VFS events. Work is expressed as {@link SyncAction}s
 * queued on the EDT and executed in batches with read-action priority on a bounded
 * background executor; the batch is retried if the pending-order snapshot changes
 * underneath it.
 *
 * @author peter
 */
public class MvcModuleStructureSynchronizer extends AbstractProjectComponent {
  private static final ExecutorService ourExecutor =
    AppExecutorUtil.createBoundedApplicationPoolExecutor("MvcModuleStructureSynchronizer pool", 1);

  // Pending (target, action) pairs; LinkedHashSet both deduplicates and preserves order.
  // Guarded by synchronized (myOrders).
  private final Set<Pair<Object, SyncAction>> myOrders = new LinkedHashSet<>();

  // Known plugin roots; creation of a directory under one of these triggers a structure update.
  private Set<VirtualFile> myPluginRoots = Collections.emptySet();

  // Debounces OutOfModuleDirectoryCreated so it is queued at most once per batch.
  private boolean myOutOfModuleDirectoryCreatedActionAdded;

  // Test hook: when set, actions run synchronously even before project initialization.
  public static boolean ourGrailsTestFlag;

  private final SimpleModificationTracker myModificationTracker = new SimpleModificationTracker();

  public MvcModuleStructureSynchronizer(Project project) {
    super(project);
  }

  /** Tracker bumped on every root change and relevant VFS event; callers use it to invalidate caches. */
  public SimpleModificationTracker getFileAndRootsModificationTracker() {
    return myModificationTracker;
  }

  @Override
  public void initComponent() {
    final MessageBusConnection connection = myProject.getMessageBus().connect();
    connection.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() {
      @Override
      public void rootsChanged(ModuleRootEvent event) {
        myModificationTracker.incModificationCount();
        queue(SyncAction.SyncLibrariesInPluginsModule, myProject);
        queue(SyncAction.UpgradeFramework, myProject);
        queue(SyncAction.CreateAppStructureIfNeeded, myProject);
        queue(SyncAction.UpdateProjectStructure, myProject);
        queue(SyncAction.EnsureRunConfigurationExists, myProject);
        updateProjectViewVisibility();
      }
    });
    connection.subscribe(ProjectTopics.MODULES, new ModuleListener() {
      @Override
      public void moduleAdded(@NotNull Project project, @NotNull Module module) {
        queue(SyncAction.UpdateProjectStructure, module);
        queue(SyncAction.CreateAppStructureIfNeeded, module);
      }
    });
    connection.subscribe(VirtualFileManager.VFS_CHANGES, new BulkVirtualFileListenerAdapter(new VirtualFileListener() {
      @Override
      public void fileCreated(@NotNull final VirtualFileEvent event) {
        myModificationTracker.incModificationCount();
        final VirtualFile file = event.getFile();
        final String fileName = event.getFileName();
        if (MvcModuleStructureUtil.APPLICATION_PROPERTIES.equals(fileName) || isApplicationDirectoryName(fileName)) {
          queue(SyncAction.UpdateProjectStructure, file);
          queue(SyncAction.EnsureRunConfigurationExists, file);
        }
        else if (isLibDirectory(file) || isLibDirectory(event.getParent())) {
          queue(SyncAction.UpdateProjectStructure, file);
        }
        else {
          if (!myProject.isInitialized()) return;

          final Module module = ProjectRootManager.getInstance(myProject).getFileIndex().getModuleForFile(file);
          if (module == null) { // Maybe it is creation of a plugin in plugin directory.
            if (file.isDirectory()) {
              if (myPluginRoots.contains(file.getParent())) {
                queue(SyncAction.UpdateProjectStructure, myProject);
                return;
              }
              if (!myOutOfModuleDirectoryCreatedActionAdded) {
                queue(SyncAction.OutOfModuleDirectoryCreated, myProject);
                myOutOfModuleDirectoryCreatedActionAdded = true;
              }
            }
            return;
          }

          if (!MvcConsole.isUpdatingVfsByConsoleProcess(module)) return;

          final MvcFramework framework = MvcFramework.getInstance(module);
          if (framework == null) return;

          if (framework.isToReformatOnCreation(file) || file.isDirectory()) {
            ApplicationManager.getApplication().invokeLater(() -> {
              if (!file.isValid()) return;
              if (!framework.hasSupport(module)) return;

              final List<VirtualFile> files = new ArrayList<>();

              if (file.isDirectory()) {
                ModuleRootManager.getInstance(module).getFileIndex().iterateContentUnderDirectory(file, new ContentIterator() {
                  @Override
                  public boolean processFile(VirtualFile fileOrDir) {
                    if (!fileOrDir.isDirectory() && framework.isToReformatOnCreation(fileOrDir)) {
                      // Bug fix: collect the matching file itself, not the enclosing
                      // directory ('file') that triggered the event. Previously the
                      // directory was added repeatedly and the matched files were
                      // never reformatted.
                      files.add(fileOrDir);
                    }
                    return true;
                  }
                });
              }
              else {
                files.add(file);
              }

              PsiManager manager = PsiManager.getInstance(myProject);
              for (VirtualFile virtualFile : files) {
                PsiFile psiFile = manager.findFile(virtualFile);
                if (psiFile != null) {
                  new ReformatCodeProcessor(myProject, psiFile, null, false).run();
                }
              }
            }, module.getDisposed());
          }
        }
      }

      @Override
      public void fileDeleted(@NotNull VirtualFileEvent event) {
        myModificationTracker.incModificationCount();
        final VirtualFile file = event.getFile();
        if (isLibDirectory(file) || isLibDirectory(event.getParent())) {
          queue(SyncAction.UpdateProjectStructure, file);
        }
      }

      @Override
      public void contentsChanged(@NotNull VirtualFileEvent event) {
        final String fileName = event.getFileName();
        if (MvcModuleStructureUtil.APPLICATION_PROPERTIES.equals(fileName)) {
          queue(SyncAction.UpdateProjectStructure, event.getFile());
        }
      }

      @Override
      public void fileMoved(@NotNull VirtualFileMoveEvent event) {
        myModificationTracker.incModificationCount();
      }

      @Override
      public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
        if (VirtualFile.PROP_NAME.equals(event.getPropertyName())) {
          myModificationTracker.incModificationCount();
        }
      }
    }));
  }

  public static MvcModuleStructureSynchronizer getInstance(Project project) {
    return project.getComponent(MvcModuleStructureSynchronizer.class);
  }

  /** True if the name matches any registered framework's application directory (e.g. "grails-app"). */
  private static boolean isApplicationDirectoryName(String fileName) {
    for (MvcFramework framework : MvcFramework.EP_NAME.getExtensions()) {
      if (framework.getApplicationDirectoryName().equals(fileName)) {
        return true;
      }
    }
    return false;
  }

  private static boolean isLibDirectory(@Nullable final VirtualFile file) {
    return file != null && "lib".equals(file.getName());
  }

  @Override
  public void projectOpened() {
    queue(SyncAction.UpdateProjectStructure, myProject);
    queue(SyncAction.EnsureRunConfigurationExists, myProject);
    queue(SyncAction.UpgradeFramework, myProject);
    queue(SyncAction.CreateAppStructureIfNeeded, myProject);
  }

  /**
   * Queues an action against a target (Project, Module, or VirtualFile). Must be called on
   * the EDT. A batch run is scheduled only when the queue transitions from empty to
   * non-empty; subsequent orders piggyback on the already-scheduled run.
   */
  public void queue(SyncAction action, Object on) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    if (myProject.isDisposed()) return;

    boolean shouldSchedule;
    synchronized (myOrders) {
      shouldSchedule = myOrders.isEmpty();
      myOrders.add(Pair.create(on, action));
    }
    if (shouldSchedule) {
      StartupManager.getInstance(myProject).runWhenProjectIsInitialized(new DumbAwareRunnable() {
        @Override
        public void run() {
          scheduleRunActions();
        }
      });
    }
  }

  /**
   * Computes the concrete actions under a cancellable read action and runs them, retrying
   * from scratch if new orders arrived (snapshot mismatch) or the read action was cancelled
   * by a write action.
   */
  private void scheduleRunActions() {
    if (myProject.isDisposed()) return;

    final Application app = ApplicationManager.getApplication();
    if (app.isUnitTestMode()) {
      if (ourGrailsTestFlag && !myProject.isInitialized()) {
        runActions(computeRawActions(takeOrderSnapshot()));
      }
      return;
    }

    final Set<Pair<Object, SyncAction>> orderSnapshot = takeOrderSnapshot();
    ReadTask task = new ReadTask() {
      @Nullable
      @Override
      public Continuation performInReadAction(@NotNull final ProgressIndicator indicator) throws ProcessCanceledException {
        final Set<Trinity<Module, SyncAction, MvcFramework>> actions = isUpToDate() ? computeRawActions(orderSnapshot)
                                                                                    : Collections.emptySet();
        return new Continuation(() -> {
          if (isUpToDate()) {
            runActions(actions);
          }
          else if (!indicator.isCanceled()) {
            scheduleRunActions();
          }
        }, ModalityState.NON_MODAL);
      }

      @Override
      public void onCanceled(@NotNull ProgressIndicator indicator) {
        scheduleRunActions();
      }

      private boolean isUpToDate() {
        return !myProject.isDisposed() && orderSnapshot.equals(takeOrderSnapshot());
      }
    };
    GuiUtils.invokeLaterIfNeeded(() -> ProgressIndicatorUtils.scheduleWithWriteActionPriority(ourExecutor, task), ModalityState.NON_MODAL);
  }

  private LinkedHashSet<Pair<Object, SyncAction>> takeOrderSnapshot() {
    synchronized (myOrders) {
      return new LinkedHashSet<>(myOrders);
    }
  }

  /** Resolves a sync-action target object to the modules it affects (possibly none). */
  @NotNull
  private List<Module> determineModuleBySyncActionObject(Object o) {
    if (o instanceof Module) {
      return Collections.singletonList((Module)o);
    }
    if (o instanceof Project) {
      return Arrays.asList(ModuleManager.getInstance((Project)o).getModules());
    }
    if (o instanceof VirtualFile) {
      final VirtualFile file = (VirtualFile)o;
      if (file.isValid()) {
        final Module module = ModuleUtilCore.findModuleForFile(file, myProject);
        if (module == null) {
          return Collections.emptyList();
        }
        return Collections.singletonList(module);
      }
    }
    return Collections.emptyList();
  }

  @TestOnly
  public static void forceUpdateProject(Project project) {
    MvcModuleStructureSynchronizer instance = project.getComponent(MvcModuleStructureSynchronizer.class);
    instance.getFileAndRootsModificationTracker().incModificationCount();
    instance.runActions(instance.computeRawActions(instance.takeOrderSnapshot()));
  }

  private void runActions(Set<Trinity<Module, SyncAction, MvcFramework>> actions) {
    try {
      boolean isProjectStructureUpdated = false;

      for (final Trinity<Module, SyncAction, MvcFramework> rawAction : actions) {
        final Module module = rawAction.first;
        if (module.isDisposed()) {
          continue;
        }

        // A whole-project structure update only needs to run once per batch.
        if (rawAction.second == SyncAction.UpdateProjectStructure && rawAction.third.updatesWholeProject()) {
          if (isProjectStructureUpdated) continue;
          isProjectStructureUpdated = true;
        }

        rawAction.second.doAction(module, rawAction.third);
      }
    }
    finally {
      // if there were any actions added during performSyncAction, clear them too
      // all needed actions are already added to buffer and have thus been performed
      // otherwise you may get repetitive 'run create-app?' questions
      synchronized (myOrders) {
        myOrders.clear();
      }
    }
  }

  private Set<Trinity<Module, SyncAction, MvcFramework>> computeRawActions(Set<Pair<Object, SyncAction>> actions) {
    //get module by object and kill duplicates
    final Set<Trinity<Module, SyncAction, MvcFramework>> rawActions = new LinkedHashSet<>();
    for (final Pair<Object, SyncAction> pair : actions) {
      for (Module module : determineModuleBySyncActionObject(pair.first)) {
        if (!module.isDisposed()) {
          final MvcFramework framework = (pair.second == SyncAction.CreateAppStructureIfNeeded)
                                         ? MvcFramework.getInstanceBySdk(module)
                                         : MvcFramework.getInstance(module);
          if (framework != null && !framework.isAuxModule(module)) {
            rawActions.add(Trinity.create(module, pair.second, framework));
          }
        }
      }
    }
    return rawActions;
  }

  /** The kinds of synchronization work that can be queued; each knows how to apply itself. */
  public enum SyncAction {
    SyncLibrariesInPluginsModule {
      @Override
      void doAction(Module module, MvcFramework framework) {
        if (MvcModuleStructureUtil.isEnabledStructureUpdate()) {
          framework.syncSdkAndLibrariesInPluginsModule(module);
        }
      }
    },

    UpgradeFramework {
      @Override
      void doAction(Module module, MvcFramework framework) {
        framework.upgradeFramework(module);
      }
    },

    CreateAppStructureIfNeeded {
      @Override
      void doAction(Module module, MvcFramework framework) {
        framework.createApplicationIfNeeded(module);
      }
    },

    UpdateProjectStructure {
      @Override
      void doAction(final Module module, final MvcFramework framework) {
        framework.updateProjectStructure(module);
      }
    },

    EnsureRunConfigurationExists {
      @Override
      void doAction(Module module, MvcFramework framework) {
        framework.ensureRunConfigurationExists(module);
      }
    },

    OutOfModuleDirectoryCreated {
      @Override
      void doAction(Module module, MvcFramework framework) {
        final Project project = module.getProject();
        final MvcModuleStructureSynchronizer mvcModuleStructureSynchronizer = getInstance(project);

        if (mvcModuleStructureSynchronizer.myOutOfModuleDirectoryCreatedActionAdded) {
          mvcModuleStructureSynchronizer.myOutOfModuleDirectoryCreatedActionAdded = false;

          Set<VirtualFile> roots = new HashSet<>();
          for (String rootPath : MvcWatchedRootProvider.getRootsToWatch(project)) {
            ContainerUtil.addIfNotNull(roots, LocalFileSystem.getInstance().findFileByPath(rootPath));
          }

          if (!roots.equals(mvcModuleStructureSynchronizer.myPluginRoots)) {
            mvcModuleStructureSynchronizer.myPluginRoots = roots;
            ApplicationManager.getApplication().invokeLater(() -> mvcModuleStructureSynchronizer.queue(UpdateProjectStructure, project));
          }
        }
      }
    };

    abstract void doAction(Module module, MvcFramework framework);
  }

  /**
   * Registers or unregisters the framework tool window depending on whether any
   * MVC framework is active in the project. No-op in unit-test mode.
   */
  private void updateProjectViewVisibility() {
    if (ApplicationManager.getApplication().isUnitTestMode()) return;

    StartupManager.getInstance(myProject).runWhenProjectIsInitialized(new DumbAwareRunnable() {
      @Override
      public void run() {
        ApplicationManager.getApplication().invokeLater(() -> {
          if (myProject.isDisposed()) return;

          for (ToolWindowEP ep : ToolWindowEP.EP_NAME.getExtensions()) {
            if (MvcToolWindowDescriptor.class.isAssignableFrom(ep.getFactoryClass())) {
              MvcToolWindowDescriptor descriptor = (MvcToolWindowDescriptor)ep.getToolWindowFactory();
              String id = descriptor.getToolWindowId();
              boolean shouldShow = descriptor.value(myProject);

              ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
              ToolWindow toolWindow = toolWindowManager.getToolWindow(id);

              if (shouldShow && toolWindow == null) {
                toolWindow = toolWindowManager.registerToolWindow(id, true, ToolWindowAnchor.LEFT, myProject, true);
                toolWindow.setIcon(descriptor.getFramework().getToolWindowIcon());
                descriptor.createToolWindowContent(myProject, toolWindow);
              }
              else if (!shouldShow && toolWindow != null) {
                toolWindowManager.unregisterToolWindow(id);
                Disposer.dispose(toolWindow.getContentManager());
              }
            }
          }
        });
      }
    });
  }
}
|
signed/intellij-community
|
plugins/groovy/src/org/jetbrains/plugins/groovy/mvc/MvcModuleStructureSynchronizer.java
|
Java
|
apache-2.0
| 17,969
|
package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"net/http"
)
// VirtualNetworkPeeringsClient is the network Client for virtual network
// peering operations (create/update, delete, get).
type VirtualNetworkPeeringsClient struct {
	BaseClient
}

// NewVirtualNetworkPeeringsClient creates an instance of the VirtualNetworkPeeringsClient client
// using the default base URI.
func NewVirtualNetworkPeeringsClient(subscriptionID string) VirtualNetworkPeeringsClient {
	return NewVirtualNetworkPeeringsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}

// NewVirtualNetworkPeeringsClientWithBaseURI creates an instance of the VirtualNetworkPeeringsClient
// client with an explicit service endpoint instead of DefaultBaseURI.
func NewVirtualNetworkPeeringsClientWithBaseURI(baseURI string, subscriptionID string) VirtualNetworkPeeringsClient {
	return VirtualNetworkPeeringsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates or updates a peering in the specified virtual network.
// This is a long-running operation: the returned future must be polled (or waited on)
// for completion; use CreateOrUpdateResponder on the final response.
// Parameters:
// resourceGroupName - the name of the resource group.
// virtualNetworkName - the name of the virtual network.
// virtualNetworkPeeringName - the name of the peering.
// virtualNetworkPeeringParameters - parameters supplied to the create or update virtual network peering
// operation.
func (client VirtualNetworkPeeringsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string, virtualNetworkPeeringParameters VirtualNetworkPeering) (result VirtualNetworkPeeringsCreateOrUpdateFuture, err error) {
	req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, virtualNetworkName, virtualNetworkPeeringName, virtualNetworkPeeringParameters)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "CreateOrUpdate", nil, "Failure preparing request")
		return
	}

	result, err = client.CreateOrUpdateSender(req)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "CreateOrUpdate", result.Response(), "Failure sending request")
		return
	}

	return
}

// CreateOrUpdatePreparer prepares the CreateOrUpdate request (PUT with JSON body).
func (client VirtualNetworkPeeringsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string, virtualNetworkPeeringParameters VirtualNetworkPeering) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"resourceGroupName":         autorest.Encode("path", resourceGroupName),
		"subscriptionId":            autorest.Encode("path", client.SubscriptionID),
		"virtualNetworkName":        autorest.Encode("path", virtualNetworkName),
		"virtualNetworkPeeringName": autorest.Encode("path", virtualNetworkPeeringName),
	}

	const APIVersion = "2018-05-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPut(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}", pathParameters),
		autorest.WithJSON(virtualNetworkPeeringParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworkPeeringsClient) CreateOrUpdateSender(req *http.Request) (future VirtualNetworkPeeringsCreateOrUpdateFuture, err error) {
	var resp *http.Response
	resp, err = autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
	if err != nil {
		return
	}
	// 200 and 201 are both success codes for this PUT; anything else is an error.
	err = autorest.Respond(resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated))
	if err != nil {
		return
	}
	future.Future, err = azure.NewFutureFromResponse(resp)
	return
}

// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client VirtualNetworkPeeringsClient) CreateOrUpdateResponder(resp *http.Response) (result VirtualNetworkPeering, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
// Delete deletes the specified virtual network peering.
// This is a long-running operation: the returned future must be polled (or waited on)
// for completion; use DeleteResponder on the final response.
// Parameters:
// resourceGroupName - the name of the resource group.
// virtualNetworkName - the name of the virtual network.
// virtualNetworkPeeringName - the name of the virtual network peering.
func (client VirtualNetworkPeeringsClient) Delete(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string) (result VirtualNetworkPeeringsDeleteFuture, err error) {
	req, err := client.DeletePreparer(ctx, resourceGroupName, virtualNetworkName, virtualNetworkPeeringName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Delete", nil, "Failure preparing request")
		return
	}

	result, err = client.DeleteSender(req)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Delete", result.Response(), "Failure sending request")
		return
	}

	return
}

// DeletePreparer prepares the Delete request (DELETE, no body).
func (client VirtualNetworkPeeringsClient) DeletePreparer(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"resourceGroupName":         autorest.Encode("path", resourceGroupName),
		"subscriptionId":            autorest.Encode("path", client.SubscriptionID),
		"virtualNetworkName":        autorest.Encode("path", virtualNetworkName),
		"virtualNetworkPeeringName": autorest.Encode("path", virtualNetworkPeeringName),
	}

	const APIVersion = "2018-05-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsDelete(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworkPeeringsClient) DeleteSender(req *http.Request) (future VirtualNetworkPeeringsDeleteFuture, err error) {
	var resp *http.Response
	resp, err = autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
	if err != nil {
		return
	}
	// DELETE may complete synchronously (200/204) or asynchronously (202 Accepted).
	err = autorest.Respond(resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent))
	if err != nil {
		return
	}
	future.Future, err = azure.NewFutureFromResponse(resp)
	return
}

// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client VirtualNetworkPeeringsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
		autorest.ByClosing())
	result.Response = resp
	return
}
// Get gets the specified virtual network peering.
// Parameters:
// resourceGroupName - the name of the resource group.
// virtualNetworkName - the name of the virtual network.
// virtualNetworkPeeringName - the name of the virtual network peering.
func (client VirtualNetworkPeeringsClient) Get(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string) (result VirtualNetworkPeering, err error) {
	// Prepare, send, respond — each stage wraps its failure with context.
	var req *http.Request
	if req, err = client.GetPreparer(ctx, resourceGroupName, virtualNetworkName, virtualNetworkPeeringName); err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Get", nil, "Failure preparing request")
		return
	}
	var resp *http.Response
	if resp, err = client.GetSender(req); err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Get", resp, "Failure sending request")
		return
	}
	if result, err = client.GetResponder(resp); err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Get", resp, "Failure responding to request")
	}
	return
}
// GetPreparer prepares the Get request.
func (client VirtualNetworkPeeringsClient) GetPreparer(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string) (*http.Request, error) {
	// URL-encode each path segment before substitution into the route template.
	pathParameters := map[string]interface{}{
		"resourceGroupName":         autorest.Encode("path", resourceGroupName),
		"subscriptionId":            autorest.Encode("path", client.SubscriptionID),
		"virtualNetworkName":        autorest.Encode("path", virtualNetworkName),
		"virtualNetworkPeeringName": autorest.Encode("path", virtualNetworkPeeringName),
	}
	// API version is fixed per SDK package revision.
	const APIVersion = "2018-05-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	// Compose the GET request; the context is attached so callers can cancel it.
	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworkPeeringsClient) GetSender(req *http.Request) (*http.Response, error) {
	retryDecorator := azure.DoRetryWithRegistration(client.Client)
	return autorest.SendWithSender(client, req, retryDecorator)
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client VirtualNetworkPeeringsClient) GetResponder(resp *http.Response) (result VirtualNetworkPeering, err error) {
	decorators := []autorest.RespondDecorator{
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing(),
	}
	err = autorest.Respond(resp, decorators...)
	result.Response = autorest.Response{Response: resp}
	return
}
// List gets all virtual network peerings in a virtual network.
// Parameters:
// resourceGroupName - the name of the resource group.
// virtualNetworkName - the name of the virtual network.
func (client VirtualNetworkPeeringsClient) List(ctx context.Context, resourceGroupName string, virtualNetworkName string) (result VirtualNetworkPeeringListResultPage, err error) {
	// Wire up the page-advance callback so the returned page can fetch
	// subsequent pages lazily via listNextResults.
	result.fn = client.listNextResults
	req, err := client.ListPreparer(ctx, resourceGroupName, virtualNetworkName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "List", nil, "Failure preparing request")
		return
	}
	resp, err := client.ListSender(req)
	if err != nil {
		// Surface the raw response (may be nil) on the first page's result.
		result.vnplr.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "List", resp, "Failure sending request")
		return
	}
	result.vnplr, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "List", resp, "Failure responding to request")
	}
	return
}
// ListPreparer prepares the List request.
func (client VirtualNetworkPeeringsClient) ListPreparer(ctx context.Context, resourceGroupName string, virtualNetworkName string) (*http.Request, error) {
	// URL-encode each path segment before substitution into the route template.
	pathParameters := map[string]interface{}{
		"resourceGroupName":  autorest.Encode("path", resourceGroupName),
		"subscriptionId":     autorest.Encode("path", client.SubscriptionID),
		"virtualNetworkName": autorest.Encode("path", virtualNetworkName),
	}
	// API version is fixed per SDK package revision.
	const APIVersion = "2018-05-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}
	// Compose the GET request; the context is attached so callers can cancel it.
	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworkPeeringsClient) ListSender(req *http.Request) (*http.Response, error) {
	retryDecorator := azure.DoRetryWithRegistration(client.Client)
	return autorest.SendWithSender(client, req, retryDecorator)
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client VirtualNetworkPeeringsClient) ListResponder(resp *http.Response) (result VirtualNetworkPeeringListResult, err error) {
	decorators := []autorest.RespondDecorator{
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing(),
	}
	err = autorest.Respond(resp, decorators...)
	result.Response = autorest.Response{Response: resp}
	return
}
// listNextResults retrieves the next set of results, if any.
func (client VirtualNetworkPeeringsClient) listNextResults(lastResults VirtualNetworkPeeringListResult) (result VirtualNetworkPeeringListResult, err error) {
	req, err := lastResults.virtualNetworkPeeringListResultPreparer()
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "listNextResults", nil, "Failure preparing next results request")
	}
	// A nil request means lastResults carries no next-page link: end of pagination.
	if req == nil {
		return
	}
	resp, err := client.ListSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "listNextResults", resp, "Failure sending next results request")
	}
	result, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "listNextResults", resp, "Failure responding to next results request")
	}
	return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client VirtualNetworkPeeringsClient) ListComplete(ctx context.Context, resourceGroupName string, virtualNetworkName string) (result VirtualNetworkPeeringListResultIterator, err error) {
	var firstPage VirtualNetworkPeeringListResultPage
	firstPage, err = client.List(ctx, resourceGroupName, virtualNetworkName)
	result.page = firstPage
	return
}
|
linzhaoming/origin
|
vendor/github.com/Azure/azure-sdk-for-go/services/network/mgmt/2018-05-01/network/virtualnetworkpeerings.go
|
GO
|
apache-2.0
| 15,734
|
package org.genericsystem.cache;
import org.genericsystem.api.core.Snapshot;
import org.genericsystem.api.core.annotations.Components;
import org.genericsystem.api.core.annotations.Dependencies;
import org.genericsystem.api.core.annotations.InstanceClass;
import org.genericsystem.api.core.annotations.SystemGeneric;
import org.genericsystem.api.core.annotations.constraints.InstanceValueClassConstraint;
import org.genericsystem.api.core.annotations.constraints.SingularConstraint;
import org.genericsystem.cache.FileSystem.Directory;
import org.genericsystem.cache.FileSystem.FileType;
import org.genericsystem.common.Generic;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * In-memory hierarchical file system modeled on the generic-system graph:
 * root directories are instances of this class, sub-directories are
 * inheritings of their parent {@link Directory}, files are holders of
 * {@link FileType}, and a file's bytes live in a {@link FileContent} holder.
 */
@SystemGeneric
@InstanceValueClassConstraint(String.class)
@Dependencies(FileType.class)
@InstanceClass(Directory.class)
public class FileSystem implements Generic {
	protected static Logger log = LoggerFactory.getLogger(FileSystem.class);
	// Separator used when parsing resource paths in getFileContent/setFile.
	private static final String SEPARATOR = "/";
	// Default content assigned to files created without explicit content.
	private static final byte[] EMPTY = "<html/>".getBytes();

	/**
	 * A directory node. Files are holders of {@link FileType}; child
	 * directories are inheritings of this directory.
	 */
	public static class Directory implements Generic {
		/** Returns all files directly contained in this directory. */
		public Snapshot<File> getFiles() {
			return (Snapshot) getHolders(getRoot().find(FileType.class));
		}

		/** Returns the file with the given name, or null if absent. */
		public File getFile(String name) {
			return (File) getHolders(getRoot().find(FileType.class)).stream().filter(x -> name.equals(x.getValue())).findFirst().orElse(null);
		}

		/** Adds a file with the default ("&lt;html/&gt;") content. */
		public File addFile(String name) {
			return addFile(name, EMPTY);
		}

		/** Adds a file with the given content. */
		public File addFile(String name, byte[] content) {
			return ((File) addHolder(getRoot().find(FileType.class), name)).setContent(content);
		}

		/** Adds-or-updates a file with the default content. */
		public File setFile(String name) {
			return setFile(name, EMPTY);
		}

		/** Adds-or-updates a file with the given content. */
		public File setFile(String name, byte[] content) {
			return ((File) setHolder(getRoot().find(FileType.class), name)).setContent(content);
		}

		/** Returns the immediate sub-directories of this directory. */
		public Snapshot<Directory> getDirectories() {
			return (Snapshot) getInheritings();
		}

		/** Returns the sub-directory with the given name, or null if absent. */
		public Directory getDirectory(String name) {
			return (Directory) getInheritings().stream().filter(x -> x.getValue().equals(name)).findFirst().orElse(null);
		}

		/** Creates a new sub-directory with the given name. */
		public Directory addDirectory(String name) {
			return (Directory) getMeta().addInstance(this, name);
		}

		/** Creates-or-returns the sub-directory with the given name. */
		public Directory setDirectory(String name) {
			return (Directory) getMeta().setInstance(this, name);
		}

		/** Returns this directory's own (unqualified) name. */
		public String getShortPath() {
			return (String) getValue();
		}
	}

	/** Attribute type whose instances are files; file bytes live in FileContent. */
	@SystemGeneric
	@Components(FileSystem.class)
	@InstanceValueClassConstraint(String.class)
	@InstanceClass(File.class)
	@Dependencies(FileContent.class)
	public static class FileType implements Generic {
	}

	/** A file node; its bytes are stored in a FileContent holder. */
	public static class File implements Generic {
		/** Returns the file's bytes (first FileContent holder — singular by constraint). */
		public byte[] getContent() {
			return (byte[]) getHolders(getRoot().find(FileContent.class)).first().getValue();
		}

		/** Replaces the file's bytes; returns this file for chaining. */
		public File setContent(byte[] content) {
			setHolder(getRoot().find(FileContent.class), content);
			return this;
		}

		/** Returns this file's own (unqualified) name. */
		public String getShortPath() {
			return (String) getValue();
		}
	}

	/** Holder type storing a file's bytes; @SingularConstraint: one content per file. */
	@SystemGeneric
	@SingularConstraint
	@Components(FileType.class)
	@InstanceValueClassConstraint(byte[].class)
	public static class FileContent implements Generic {
	}

	/** Returns all root directories of this file system. */
	public Snapshot<Generic> getRootDirectories() {
		return getInstances();
	}

	/** Returns the root directory with the given name, or null if absent. */
	public Directory getRootDirectory(String name) {
		return (Directory) getRootDirectories().stream().filter(x -> x.getValue().equals(name)).findFirst().orElse(null);
	}

	/**
	 * Creates a new root directory.
	 *
	 * @throws IllegalStateException if a root directory with that name already exists
	 */
	public Directory addRootDirectory(String name) {
		if (getRootDirectory(name) != null)
			throw new IllegalStateException("Root directory : " + name + " already exists");
		return (Directory) addInstance(name);
	}

	/** Creates-or-returns the root directory with the given name. */
	public Directory setRootDirectory(String name) {
		return (Directory) setInstance(name);
	}

	/**
	 * Resolves a "/"-separated resource path and returns the file's bytes,
	 * or null if any path component is missing.
	 */
	public byte[] getFileContent(String resource) {
		if (resource.startsWith(SEPARATOR))
			resource = resource.substring(1);
		String[] files = resource.split(SEPARATOR);
		Directory directory = getRootDirectory(files[0]);
		if (directory == null)
			return null;
		// Walk intermediate components; the last component is the file name.
		for (int i = 1; i < files.length - 1; i++) {
			directory = directory.getDirectory(files[i]);
			if (directory == null)
				return null;
		}
		File file = directory.getFile(files[files.length - 1]);
		if (file == null)
			return null;
		return file.getContent();
	}

	/** Creates-or-updates a file at the given path with the default content. */
	public Generic setFile(String resource) {
		return setFile(resource, EMPTY);
	}

	/**
	 * Creates-or-updates a file at the given "/"-separated path, creating
	 * root and intermediate directories as needed.
	 */
	public Generic setFile(String resource, byte[] content) {
		if (resource.startsWith(SEPARATOR))
			resource = resource.substring(1);
		String[] pathToResource = resource.split(SEPARATOR);
		Directory directory = setRootDirectory(pathToResource[0]);
		for (int i = 1; i < pathToResource.length - 1; i++)
			directory = directory.setDirectory(pathToResource[i]);
		return directory.setFile(pathToResource[pathToResource.length - 1], content);
	}
}
|
genericsystem/genericsystem2015
|
gs-kernel/src/test/java/org/genericsystem/cache/FileSystem.java
|
Java
|
apache-2.0
| 4,717
|
# AUTOGENERATED FILE
FROM balenalib/ts4900-ubuntu:cosmic-build

# Pinned Node.js / Yarn versions for this image.
ENV NODE_VERSION 15.6.0
ENV YARN_VERSION 1.22.4

# Install Node.js and Yarn. The signing key is fetched from several keyservers
# for redundancy; the Node tarball is fetched over HTTPS (was plain http://)
# and verified against a pinned sha256 (note: sha256sum -c requires two spaces
# between hash and filename); the Yarn tarball is GPG-verified before unpacking.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& echo "234871415c54174f91764f332a72631519a6af7b1a87797ad7c729855182f9cd  node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*

# Placeholder CMD; downstream images are expected to override it.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Smoke-test the installed Node.js stack.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \
	&& echo "Running test-stack@node" \
	&& chmod +x test-stack@node.sh \
	&& bash test-stack@node.sh \
	&& rm -rf test-stack@node.sh

# Record human-readable image details for the balena-info command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu cosmic \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v15.6.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Shim /bin/sh to print image info on first shell invocation, then restore itself.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
|
nghiant2710/base-images
|
balena-base-images/node/ts4900/ubuntu/cosmic/15.6.0/build/Dockerfile
|
Dockerfile
|
apache-2.0
| 2,757
|
/*
* Powered By cuichen
* Since 2014 - 2015
*/package com.seeyoui.kensite.framework.system.service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.seeyoui.kensite.common.base.domain.Attributes;
import com.seeyoui.kensite.common.base.domain.TreeJson;
import com.seeyoui.kensite.common.base.service.BaseService;
import com.seeyoui.kensite.common.constants.StringConstant;
import com.seeyoui.kensite.common.exception.CRUDException;
import com.seeyoui.kensite.common.util.StringUtils;
import com.seeyoui.kensite.framework.system.domain.SysModulePermission;
import com.seeyoui.kensite.framework.system.domain.SysPermission;
import com.seeyoui.kensite.framework.system.persistence.SysModulePermissionMapper;
/**
 * Service managing the mapping between modules and permissions.
 *
 * @author cuichen
 * @version 1.0
 * @since 1.0
 */
@Service
public class SysModulePermissionService extends BaseService {

	@Autowired
	private SysModulePermissionMapper sysModulePermissionMapper;

	/**
	 * Builds the permission tree for the given module/permission query.
	 *
	 * @param sysModulePermission query object identifying the module
	 * @return the children of a synthetic root node
	 * @throws CRUDException on persistence failure
	 */
	public List<TreeJson> tree(SysModulePermission sysModulePermission) throws CRUDException {
		List<SysPermission> permissions = sysModulePermissionMapper.tree(sysModulePermission);
		List<TreeJson> nodes = new ArrayList<TreeJson>();
		for (SysPermission permission : permissions) {
			TreeJson node = new TreeJson();
			node.setId(permission.getId());
			node.setText(permission.getName());
			// All nodes are initially attached to the synthetic root.
			node.setPid(StringConstant.ROOT_ID_32);
			node.setChecked(permission.getChecked());
			node.setAttributes(new Attributes());
			nodes.add(node);
		}
		TreeJson root = new TreeJson();
		root.setId(StringConstant.ROOT_ID_32);
		TreeJson.getTree(nodes, root);
		return root.getChildren();
	}

	/**
	 * Replaces a module's permission set: deletes all existing
	 * module-permission rows, then inserts one row per id in the
	 * comma-separated permission id list.
	 *
	 * @param sysModulePermission carries the module id and a comma-separated permission id list
	 * @throws CRUDException on persistence failure
	 */
	public void save(SysModulePermission sysModulePermission) throws CRUDException {
		sysModulePermissionMapper.delete(sysModulePermission.getModuleId());
		String permissionIds = sysModulePermission.getPermissionId();
		if (StringUtils.isNotBlank(permissionIds)) {
			for (String permissionId : permissionIds.split(",")) {
				sysModulePermission.setPermissionId(permissionId);
				sysModulePermissionMapper.save(sysModulePermission);
			}
		}
	}
}
|
seeyoui/kensite_cms
|
src/main/java/com/seeyoui/kensite/framework/system/service/SysModulePermissionService.java
|
Java
|
apache-2.0
| 2,485
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.search.searches;
import com.intellij.openapi.application.DumbAwareSearchParameters;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiReference;
import com.intellij.psi.search.*;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Query factory for finding references to a {@link PsiMethod}. Concrete
 * executors are contributed via the {@code com.intellij.methodReferencesSearch}
 * extension point.
 *
 * @author max
 */
public class MethodReferencesSearch extends ExtensibleQueryFactory<PsiReference, MethodReferencesSearch.SearchParameters> {
	public static final ExtensionPointName<QueryExecutor> EP_NAME = ExtensionPointName.create("com.intellij.methodReferencesSearch");
	public static final MethodReferencesSearch INSTANCE = new MethodReferencesSearch();

	/** Parameter object describing a single method-reference search. */
	public static class SearchParameters implements DumbAwareSearchParameters {
		private final PsiMethod myMethod;
		private final Project myProject;
		private final SearchScope myScope;
		// When true, only references matching the exact signature are searched for.
		private final boolean myStrictSignatureSearch;
		private final SearchRequestCollector myOptimizer;
		// True when the optimizer was supplied by the caller (shared across searches).
		private final boolean isSharedOptimizer;

		public SearchParameters(@NotNull PsiMethod method, @NotNull SearchScope scope, boolean strictSignatureSearch, @Nullable SearchRequestCollector optimizer) {
			myMethod = method;
			myScope = scope;
			myStrictSignatureSearch = strictSignatureSearch;
			isSharedOptimizer = optimizer != null;
			// Use the caller's collector if given, otherwise a private one with a fresh session.
			myOptimizer = optimizer != null ? optimizer : new SearchRequestCollector(new SearchSession());
			myProject = PsiUtilCore.getProjectInReadAction(method);
		}

		public SearchParameters(@NotNull PsiMethod method, @NotNull SearchScope scope, final boolean strict) {
			this(method, scope, strict, null);
		}

		@NotNull
		public Project getProject() {
			return myProject;
		}

		@NotNull
		public PsiMethod getMethod() {
			return myMethod;
		}

		public boolean isStrictSignatureSearch() {
			return myStrictSignatureSearch;
		}

		public SearchRequestCollector getOptimizer() {
			return myOptimizer;
		}

		/**
		 * @return the user-visible search scope, most often "Project Files" or "Project and Libraries".
		 * Searchers most likely need to use {@link #getEffectiveSearchScope()}.
		 */
		public SearchScope getScopeDeterminedByUser() {
			return myScope;
		}

		/**
		 * @return Same as {@link #getScopeDeterminedByUser()}. Searchers most likely need to use {@link #getEffectiveSearchScope()}.
		 */
		@Deprecated
		@NotNull
		public SearchScope getScope() {
			return getScopeDeterminedByUser();
		}

		/** Intersects the user-chosen scope with the method's use scope. */
		@NotNull
		public SearchScope getEffectiveSearchScope () {
			SearchScope accessScope = PsiSearchHelper.SERVICE.getInstance(myMethod.getProject()).getUseScope(myMethod);
			return myScope.intersectWith(accessScope);
		}
	}

	private MethodReferencesSearch() {}

	public static Query<PsiReference> search(@NotNull PsiMethod method, SearchScope scope, final boolean strictSignatureSearch) {
		return search(new SearchParameters(method, scope, strictSignatureSearch));
	}

	/** Convenience overload adapting a plain {@link Processor} to the pair-processor form. */
	public static void searchOptimized(final PsiMethod method, SearchScope scope, final boolean strictSignatureSearch,
	                                   @NotNull SearchRequestCollector collector, final Processor<PsiReference> processor) {
		searchOptimized(method, scope, strictSignatureSearch, collector, false, (psiReference, collector1) -> processor.process(psiReference));
	}

	public static void searchOptimized(final PsiMethod method, SearchScope scope, final boolean strictSignatureSearch,
	                                   SearchRequestCollector collector, final boolean inReadAction,
	                                   PairProcessor<PsiReference, SearchRequestCollector> processor) {
		// Nest a collector sharing the caller's session so sub-requests are batched together.
		final SearchRequestCollector nested = new SearchRequestCollector(collector.getSearchSession());
		collector.searchQuery(new QuerySearchRequest(search(new SearchParameters(method, scope, strictSignatureSearch, nested)), nested,
		                                             inReadAction, processor));
	}

	public static Query<PsiReference> search(final SearchParameters parameters) {
		final Query<PsiReference> result = INSTANCE.createQuery(parameters);
		if (parameters.isSharedOptimizer) {
			// Shared optimizer: skip merging its collected requests here
			// (presumably the owning collector drives them); only dedupe.
			return uniqueResults(result);
		}
		// Private optimizer: merge its collected requests into the returned query.
		final SearchRequestCollector requests = parameters.getOptimizer();
		Project project = PsiUtilCore.getProjectInReadAction(parameters.getMethod());
		return uniqueResults(new MergeQuery<>(result, new SearchRequestQuery(project, requests)));
	}

	public static Query<PsiReference> search(final PsiMethod method, final boolean strictSignatureSearch) {
		return search(method, GlobalSearchScope.allScope(PsiUtilCore.getProjectInReadAction(method)), strictSignatureSearch);
	}

	public static Query<PsiReference> search(final PsiMethod method) {
		return search(method, true);
	}

	/** Wraps the query so duplicate references are reported only once. */
	private static UniqueResultsQuery<PsiReference, ReferenceDescriptor> uniqueResults(@NotNull Query<PsiReference> composite) {
		return new UniqueResultsQuery<>(composite, ContainerUtil.<ReferenceDescriptor>canonicalStrategy(), ReferenceDescriptor.MAPPER);
	}
}
|
hurricup/intellij-community
|
java/java-indexing-api/src/com/intellij/psi/search/searches/MethodReferencesSearch.java
|
Java
|
apache-2.0
| 5,947
|
# Eriocaulon zollingerianoides Z.X.Zhang SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Liliopsida/Poales/Eriocaulaceae/Eriocaulon/Eriocaulon zollingerianoides/README.md
|
Markdown
|
apache-2.0
| 196
|
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.jobs.v4.model;
/**
 * Request to update a batch of jobs.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Cloud Talent Solution API. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class BatchUpdateJobsRequest extends com.google.api.client.json.GenericJson {

  /**
   * Required. The jobs to be updated. A maximum of 200 jobs can be updated in a batch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Job> jobs;

  /**
   * Strongly recommended for the best service experience. Be aware that it will also increase
   * latency when checking the status of a batch operation. If update_mask is provided, only the
   * specified fields in Job are updated. Otherwise all the fields are updated. A field mask to
   * restrict the fields that are updated. Only top level fields of Job are supported. If
   * update_mask is provided, The Job inside JobResult will only contains fields that is updated,
   * plus the Id of the Job. Otherwise, Job will include all fields, which can yield a very large
   * response.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String updateMask;

  /**
   * Required. The jobs to be updated. A maximum of 200 jobs can be updated in a batch.
   * @return value or {@code null} for none
   */
  public java.util.List<Job> getJobs() {
    return jobs;
  }

  /**
   * Required. The jobs to be updated. A maximum of 200 jobs can be updated in a batch.
   * @param jobs jobs or {@code null} for none
   */
  public BatchUpdateJobsRequest setJobs(java.util.List<Job> jobs) {
    this.jobs = jobs;
    return this;
  }

  /**
   * Strongly recommended for the best service experience. Be aware that it will also increase
   * latency when checking the status of a batch operation. If update_mask is provided, only the
   * specified fields in Job are updated. Otherwise all the fields are updated. A field mask to
   * restrict the fields that are updated. Only top level fields of Job are supported. If
   * update_mask is provided, The Job inside JobResult will only contains fields that is updated,
   * plus the Id of the Job. Otherwise, Job will include all fields, which can yield a very large
   * response.
   * @return value or {@code null} for none
   */
  public String getUpdateMask() {
    return updateMask;
  }

  /**
   * Strongly recommended for the best service experience. Be aware that it will also increase
   * latency when checking the status of a batch operation. If update_mask is provided, only the
   * specified fields in Job are updated. Otherwise all the fields are updated. A field mask to
   * restrict the fields that are updated. Only top level fields of Job are supported. If
   * update_mask is provided, The Job inside JobResult will only contains fields that is updated,
   * plus the Id of the Job. Otherwise, Job will include all fields, which can yield a very large
   * response.
   * @param updateMask updateMask or {@code null} for none
   */
  public BatchUpdateJobsRequest setUpdateMask(String updateMask) {
    this.updateMask = updateMask;
    return this;
  }

  // Narrow the return type of GenericJson's dynamic-field setter for fluent chaining.
  @Override
  public BatchUpdateJobsRequest set(String fieldName, Object value) {
    return (BatchUpdateJobsRequest) super.set(fieldName, value);
  }

  // Covariant override so clients get the concrete type back from clone().
  @Override
  public BatchUpdateJobsRequest clone() {
    return (BatchUpdateJobsRequest) super.clone();
  }
}
|
googleapis/google-api-java-client-services
|
clients/google-api-services-jobs/v4/1.31.0/com/google/api/services/jobs/v4/model/BatchUpdateJobsRequest.java
|
Java
|
apache-2.0
| 4,455
|
// Copyright © 2016 NAME HERE <EMAIL ADDRESS>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"fmt"
"github.com/fanux/pbrain/common"
"github.com/fanux/pbrain/plugins/decider"
"github.com/spf13/cobra"
)
// deciderCmd represents the decider command
var deciderCmd = &cobra.Command{
	Use:   "decider",
	Short: "scale apps by app metrical",
	Long:  `app metrical is loadbalance info, or cpu memery use info`,
	Run: func(cmd *cobra.Command, args []string) {
		// Work your own magic here
		fmt.Println("decider called")
		// Build the plugin scaffolding against the manager endpoint and run
		// the decider plugin (second field nil: no extra state supplied here).
		basePlugin := common.GetBasePlugin(ManagerHost, ManagerPort, decider.PLUGIN_NAME)
		RunPlugin(&decider.Decider{basePlugin, nil})
	},
}
func init() {
	// Register the decider subcommand on the root command.
	RootCmd.AddCommand(deciderCmd)

	// Here you will define your flags and configuration settings.

	// Cobra supports Persistent Flags which will work for this command
	// and all subcommands, e.g.:
	// deciderCmd.PersistentFlags().String("foo", "", "A help for foo")

	// Cobra supports local flags which will only run when this command
	// is called directly, e.g.:
	// deciderCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
|
fanux/pbrain
|
cmd/decider.go
|
GO
|
apache-2.0
| 1,656
|
import React, {Component} from 'react';
class Signup extends Component {
handleSignup(e) {
e.preventDefault();
if ($('#user').val() === '' || $('#pass').val() === '' || $('#pass2').val() === '') {
handleError('Please fill out all fields.');
return false;
}
if ($('#pass').val() !== $('#pass2').val()) {
handleError('Please make sure the passwords match.');
return false;
}
sendAjax('POST', $('#signupForm').attr('action'), $('#signupForm').serialize(), redirect);
return false;
}
render() {
return (
<form id="signupForm" name="signupForm" onSubmit={this.handleSignup} action="/signup" method="POST" className="mainForm">
<div className="ui input textField">
<input id="user" type="text" name="username" placeholder="Username"/>
</div>
<div className="ui input textField">
<input id="pass" type="password" name="pass" placeholder="Password"/>
</div>
<div className="ui input textField">
<input id="pass2" type="password" name="pass2" placeholder="Confirm Password"/>
</div>
<input type="hidden" name="_csrf" value={this.props.csrf}/>
<button className="large ui orange button loginButton" type="submit" form="signupForm" value="Sign Up">Sign Up</button>
<div className="signUpBlurb">
Already have an account?
<a className="signUpLink" onClick={this.props.onLogin}> Login </a>
here!
</div>
</form>
);
}
}
export default Signup;
|
li-cai/folio2
|
client/login/Signup.js
|
JavaScript
|
apache-2.0
| 1,555
|
from citrination_client.search.pif.query.chemical.chemical_field_operation import ChemicalFieldOperation
from citrination_client.search.pif.query.core.base_object_query import BaseObjectQuery
from citrination_client.search.pif.query.core.field_operation import FieldOperation
class CompositionQuery(BaseObjectQuery):
    """
    Class to query against a PIF Composition object.

    Each queryable field is exposed as a property; assigning to a property
    normalizes the raw value into the corresponding operation object via
    ``BaseObjectQuery._get_object``, and deleting it resets the field to None.
    """

    def __init__(self, element=None, actual_weight_percent=None, actual_atomic_percent=None,
                 ideal_weight_percent=None, ideal_atomic_percent=None, logic=None, tags=None,
                 length=None, offset=None):
        """
        Constructor.

        :param element: One or more :class:`ChemicalFieldOperation` operations against the element field.
        :param actual_weight_percent: One or more :class:`FieldOperation` operations against the actual
        weight percent field.
        :param actual_atomic_percent: One or more :class:`FieldOperation` operations against the actual
        atomic percent field.
        :param ideal_weight_percent: One or more :class:`FieldOperation` operations against the ideal
        weight percent field.
        :param ideal_atomic_percent: One or more :class:`FieldOperation` operations against the ideal
        atomic percent field.
        :param logic: Logic for this filter. Must be equal to one of "MUST", "MUST_NOT", "SHOULD", or "OPTIONAL".
        :param tags: One or more :class:`FieldOperation` operations against the tags field.
        :param length: One or more :class:`FieldOperation` operations against the length field.
        :param offset: One or more :class:`FieldOperation` operations against the offset field.
        """
        super(CompositionQuery, self).__init__(logic=logic, tags=tags, length=length, offset=offset)
        # Initialize each backing attribute, then route the constructor value
        # through the property setter so it is normalized consistently.
        self._element = None
        self.element = element
        self._actual_weight_percent = None
        self.actual_weight_percent = actual_weight_percent
        self._actual_atomic_percent = None
        self.actual_atomic_percent = actual_atomic_percent
        self._ideal_weight_percent = None
        self.ideal_weight_percent = ideal_weight_percent
        self._ideal_atomic_percent = None
        self.ideal_atomic_percent = ideal_atomic_percent

    # element: ChemicalFieldOperation(s) against the element field.
    @property
    def element(self):
        return self._element

    @element.setter
    def element(self, element):
        self._element = self._get_object(ChemicalFieldOperation, element)

    @element.deleter
    def element(self):
        self._element = None

    # actual_weight_percent: FieldOperation(s) against the actual weight percent field.
    @property
    def actual_weight_percent(self):
        return self._actual_weight_percent

    @actual_weight_percent.setter
    def actual_weight_percent(self, actual_weight_percent):
        self._actual_weight_percent = self._get_object(FieldOperation, actual_weight_percent)

    @actual_weight_percent.deleter
    def actual_weight_percent(self):
        self._actual_weight_percent = None

    # actual_atomic_percent: FieldOperation(s) against the actual atomic percent field.
    @property
    def actual_atomic_percent(self):
        return self._actual_atomic_percent

    @actual_atomic_percent.setter
    def actual_atomic_percent(self, actual_atomic_percent):
        self._actual_atomic_percent = self._get_object(FieldOperation, actual_atomic_percent)

    @actual_atomic_percent.deleter
    def actual_atomic_percent(self):
        self._actual_atomic_percent = None

    # ideal_weight_percent: FieldOperation(s) against the ideal weight percent field.
    @property
    def ideal_weight_percent(self):
        return self._ideal_weight_percent

    @ideal_weight_percent.setter
    def ideal_weight_percent(self, ideal_weight_percent):
        self._ideal_weight_percent = self._get_object(FieldOperation, ideal_weight_percent)

    @ideal_weight_percent.deleter
    def ideal_weight_percent(self):
        self._ideal_weight_percent = None

    # ideal_atomic_percent: FieldOperation(s) against the ideal atomic percent field.
    @property
    def ideal_atomic_percent(self):
        return self._ideal_atomic_percent

    @ideal_atomic_percent.setter
    def ideal_atomic_percent(self, ideal_atomic_percent):
        self._ideal_atomic_percent = self._get_object(FieldOperation, ideal_atomic_percent)

    @ideal_atomic_percent.deleter
    def ideal_atomic_percent(self):
        self._ideal_atomic_percent = None
|
calfonso/python-citrination-client
|
citrination_client/search/pif/query/chemical/composition_query.py
|
Python
|
apache-2.0
| 4,113
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <aws/elasticbeanstalk/ElasticBeanstalk_EXPORTS.h>
#include <aws/elasticbeanstalk/model/ApplicationDescription.h>
#include <aws/elasticbeanstalk/model/ResponseMetadata.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Xml
{
class XmlDocument;
} // namespace Xml
} // namespace Utils
namespace ElasticBeanstalk
{
namespace Model
{
/**
* <p>Result message containing a single description of an application.</p>
*/
class AWS_ELASTICBEANSTALK_API UpdateApplicationResult
{
public:
UpdateApplicationResult();
UpdateApplicationResult(const AmazonWebServiceResult<Aws::Utils::Xml::XmlDocument>& result);
UpdateApplicationResult& operator=(const AmazonWebServiceResult<Aws::Utils::Xml::XmlDocument>& result);
/**
* <p> The <a>ApplicationDescription</a> of the application. </p>
*/
inline const ApplicationDescription& GetApplication() const{ return m_application; }
/**
* <p> The <a>ApplicationDescription</a> of the application. </p>
*/
inline void SetApplication(const ApplicationDescription& value) { m_application = value; }
/**
* <p> The <a>ApplicationDescription</a> of the application. </p>
*/
inline void SetApplication(ApplicationDescription&& value) { m_application = value; }
/**
* <p> The <a>ApplicationDescription</a> of the application. </p>
*/
inline UpdateApplicationResult& WithApplication(const ApplicationDescription& value) { SetApplication(value); return *this;}
/**
* <p> The <a>ApplicationDescription</a> of the application. </p>
*/
inline UpdateApplicationResult& WithApplication(ApplicationDescription&& value) { SetApplication(value); return *this;}
inline const ResponseMetadata& GetResponseMetadata() const{ return m_responseMetadata; }
inline void SetResponseMetadata(const ResponseMetadata& value) { m_responseMetadata = value; }
inline void SetResponseMetadata(ResponseMetadata&& value) { m_responseMetadata = value; }
inline UpdateApplicationResult& WithResponseMetadata(const ResponseMetadata& value) { SetResponseMetadata(value); return *this;}
inline UpdateApplicationResult& WithResponseMetadata(ResponseMetadata&& value) { SetResponseMetadata(value); return *this;}
private:
ApplicationDescription m_application;
ResponseMetadata m_responseMetadata;
};
} // namespace Model
} // namespace ElasticBeanstalk
} // namespace Aws
|
ambasta/aws-sdk-cpp
|
aws-cpp-sdk-elasticbeanstalk/include/aws/elasticbeanstalk/model/UpdateApplicationResult.h
|
C
|
apache-2.0
| 3,096
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iot.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.iot.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * TopicRule JSON Unmarshaller.
 *
 * <p>Generated code: walks the JSON token stream and populates a
 * {@link TopicRule} from the fields found at the rule object's depth.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TopicRuleJsonUnmarshaller implements Unmarshaller<TopicRule, JsonUnmarshallerContext> {
    /**
     * Unmarshalls a single TopicRule from the current position of the JSON
     * parsing context.
     *
     * @param context shared JSON parsing context positioned at (or just
     *                before) the rule object
     * @return the populated {@link TopicRule}, or {@code null} if the JSON
     *         value at the current position is an explicit null
     * @throws Exception propagated from the underlying JSON parser and field
     *                   unmarshallers
     */
    public TopicRule unmarshall(JsonUnmarshallerContext context) throws Exception {
        TopicRule topicRule = new TopicRule();
        // Remember where we started so we know when we've stepped back out
        // of this object; member fields live exactly one level deeper.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;
        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // An explicit JSON null means "no rule" rather than an empty rule.
        if (token == VALUE_NULL) {
            return null;
        }
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                // Each testExpression checks the field name at targetDepth;
                // nextToken() then advances onto the field's value before it
                // is handed to the type-specific unmarshaller.
                if (context.testExpression("ruleName", targetDepth)) {
                    context.nextToken();
                    topicRule.setRuleName(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("sql", targetDepth)) {
                    context.nextToken();
                    topicRule.setSql(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("description", targetDepth)) {
                    context.nextToken();
                    topicRule.setDescription(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("createdAt", targetDepth)) {
                    context.nextToken();
                    // Timestamps are serialized as unix epoch values.
                    topicRule.setCreatedAt(DateJsonUnmarshallerFactory.getInstance("unixTimestamp").unmarshall(context));
                }
                if (context.testExpression("actions", targetDepth)) {
                    context.nextToken();
                    topicRule.setActions(new ListUnmarshaller<Action>(ActionJsonUnmarshaller.getInstance())
                            .unmarshall(context));
                }
                if (context.testExpression("ruleDisabled", targetDepth)) {
                    context.nextToken();
                    topicRule.setRuleDisabled(context.getUnmarshaller(Boolean.class).unmarshall(context));
                }
                if (context.testExpression("awsIotSqlVersion", targetDepth)) {
                    context.nextToken();
                    topicRule.setAwsIotSqlVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("errorAction", targetDepth)) {
                    context.nextToken();
                    topicRule.setErrorAction(ActionJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the parser has closed the object we started in,
                // i.e. depth has returned to (or above) the original level.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return topicRule;
    }

    // Lazily created singleton. NOTE(review): this lazy init is not
    // thread-safe, but matches the pattern used by all generated
    // unmarshallers in this SDK; worst case is a redundant stateless instance.
    private static TopicRuleJsonUnmarshaller instance;

    /** @return the shared (stateless) unmarshaller instance */
    public static TopicRuleJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new TopicRuleJsonUnmarshaller();
        return instance;
    }
}
|
aws/aws-sdk-java
|
aws-java-sdk-iot/src/main/java/com/amazonaws/services/iot/model/transform/TopicRuleJsonUnmarshaller.java
|
Java
|
apache-2.0
| 4,376
|
package com.bq.corbel.resources.rem.request.builder;
import com.bq.corbel.lib.queries.jaxrs.QueryParameters;
import com.bq.corbel.lib.queries.request.Aggregation;
import com.bq.corbel.lib.queries.request.Pagination;
import com.bq.corbel.lib.queries.request.ResourceQuery;
import com.bq.corbel.lib.queries.request.Search;
import com.bq.corbel.lib.queries.request.Sort;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
/**
 * Fluent builder for {@link QueryParameters} instances.
 *
 * <p>All parts are optional; unset parts are passed to the constructed
 * {@link QueryParameters} as empty {@link Optional}s.</p>
 *
 * @author Rubén Carrasco
 *
 */
public class QueryParametersBuilder {
    private Pagination pagination;
    private Sort sort;
    private List<ResourceQuery> queries;
    private List<ResourceQuery> conditions;
    private Aggregation aggregation;
    private Search search;

    /** Creates an empty builder. */
    public QueryParametersBuilder() {}

    /**
     * Creates a builder pre-populated from an existing {@link QueryParameters}.
     *
     * @param queryParameters the instance to copy values from
     */
    public QueryParametersBuilder(QueryParameters queryParameters) {
        this.pagination = queryParameters.getPagination();
        // Use orElse(null) rather than Optional.get(): the getters return
        // Optionals that may be empty, and get() on an empty Optional throws
        // NoSuchElementException.
        this.sort = queryParameters.getSort().orElse(null);
        this.queries = queryParameters.getQueries().orElse(null);
        this.conditions = queryParameters.getConditions().orElse(null);
        this.aggregation = queryParameters.getAggregation().orElse(null);
        this.search = queryParameters.getSearch().orElse(null);
    }

    /** @return a new {@link QueryParameters} built from the current state */
    public QueryParameters build() {
        return new QueryParameters(pagination, Optional.ofNullable(sort), Optional.ofNullable(queries), Optional.ofNullable(conditions),
                Optional.ofNullable(aggregation), Optional.ofNullable(search));
    }

    public QueryParametersBuilder pagination(Pagination pagination) {
        this.pagination = pagination;
        return this;
    }

    public QueryParametersBuilder sort(Sort sort) {
        this.sort = sort;
        return this;
    }

    public QueryParametersBuilder queries(List<ResourceQuery> queries) {
        this.queries = queries;
        return this;
    }

    public QueryParametersBuilder queries(ResourceQuery... queries) {
        // Copy into an ArrayList: Arrays.asList returns a fixed-size list,
        // which would make a subsequent query(...) call throw
        // UnsupportedOperationException on add().
        this.queries = new ArrayList<>(Arrays.asList(queries));
        return this;
    }

    /** Appends a single query, creating the list on first use. */
    public QueryParametersBuilder query(ResourceQuery query) {
        if (queries == null) {
            queries = new ArrayList<>();
        }
        queries.add(query);
        return this;
    }

    public QueryParametersBuilder conditions(List<ResourceQuery> conditions) {
        this.conditions = conditions;
        return this;
    }

    public QueryParametersBuilder conditions(ResourceQuery... conditions) {
        // Same fixed-size pitfall as queries(ResourceQuery...): copy so that
        // condition(...) can still append afterwards.
        this.conditions = new ArrayList<>(Arrays.asList(conditions));
        return this;
    }

    /** Appends a single condition, creating the list on first use. */
    public QueryParametersBuilder condition(ResourceQuery condition) {
        if (conditions == null) {
            conditions = new ArrayList<>();
        }
        conditions.add(condition);
        return this;
    }

    public QueryParametersBuilder aggregation(Aggregation aggregation) {
        this.aggregation = aggregation;
        return this;
    }

    public QueryParametersBuilder search(Search search) {
        this.search = search;
        return this;
    }
}
|
bq/corbel
|
rem-api/src/main/java/com/bq/corbel/resources/rem/request/builder/QueryParametersBuilder.java
|
Java
|
apache-2.0
| 2,999
|
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.web.widgets.client.groupbox;
import com.google.gwt.aria.client.Roles;
import com.google.gwt.dom.client.Style;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.haulmont.cuba.web.widgets.CubaGroupBox;
import com.haulmont.cuba.web.widgets.client.Tools;
import com.vaadin.client.*;
import com.vaadin.client.communication.StateChangeEvent;
import com.vaadin.client.ui.VPanel;
import com.vaadin.client.ui.panel.PanelConnector;
import com.vaadin.shared.ui.Connect;
import com.vaadin.shared.ui.MarginInfo;
@Connect(CubaGroupBox.class)
public class CubaGroupBoxConnector extends PanelConnector {
    // Style names for the optional "?" context-help icon in the caption.
    public static final String CONTEXT_HELP_CLASSNAME = "c-context-help-button";
    public static final String CONTEXT_HELP_CLICKABLE_CLASSNAME = "c-context-help-button-clickable";

    // True once the widget and its layout dependencies have been set up
    // (done lazily on the first state change, see onStateChanged).
    protected boolean widgetInitialized = false;

    @Override
    public CubaGroupBoxWidget getWidget() {
        return (CubaGroupBoxWidget) super.getWidget();
    }

    @Override
    public void init() {
        super.init();
        // Forward expand/collapse toggles from the client widget to the server.
        getWidget().expandHandler = expanded ->
                getRpcProxy(CubaGroupBoxServerRpc.class).expandStateChanged(expanded);
    }

    @Override
    public void onUnregister() {
        super.onUnregister();
        // Dependencies are only registered in group-box mode (see
        // onStateChanged), so only unregister them in that same case.
        if (!getState().showAsPanel && widgetInitialized) {
            LayoutManager layoutManager = getLayoutManager();
            CubaGroupBoxWidget widget = getWidget();
            layoutManager.unregisterDependency(this, widget.captionStartDeco);
            layoutManager.unregisterDependency(this, widget.captionEndDeco);
            layoutManager.unregisterDependency(this, widget.captionTextNode);
        }
    }

    @Override
    public CubaGroupBoxState getState() {
        return (CubaGroupBoxState) super.getState();
    }

    @Override
    public void updateFromUIDL(UIDL uidl, ApplicationConnection client) {
        super.updateFromUIDL(uidl, client);
        if (!getState().showAsPanel) {
            // replace VPanel class names
            CubaGroupBoxWidget widget = getWidget();
            Tools.replaceClassNames(widget.captionNode, VPanel.CLASSNAME, widget.getStylePrimaryName());
            Tools.replaceClassNames(widget.captionWrap, VPanel.CLASSNAME, widget.getStylePrimaryName());
            Tools.replaceClassNames(widget.contentNode, VPanel.CLASSNAME, widget.getStylePrimaryName());
            Tools.replaceClassNames(widget.bottomDecoration, VPanel.CLASSNAME, widget.getStylePrimaryName());
            Tools.replaceClassNames(widget.getElement(), VPanel.CLASSNAME, widget.getStylePrimaryName());
        }
    }

    @Override
    public void layout() {
        // Custom layout only applies in group-box mode; plain-panel mode
        // falls back to the stock PanelConnector behavior.
        if (!getState().showAsPanel) {
            layoutGroupBox();
        } else {
            super.layout();
        }
    }

    /**
     * Group-box variant of PanelConnector.layout(): measures the caption and
     * decorations, then compensates with paddings/negative margins so the
     * caption sits on the border. Statement order mirrors the superclass
     * implementation and is significant (measure, then mutate styles, then
     * restore scroll positions).
     */
    protected void layoutGroupBox() {
        CubaGroupBoxWidget panel = getWidget();
        LayoutManager layoutManager = getLayoutManager();
        if (isBordersVisible()) {
            int captionWidth = layoutManager.getOuterWidth(panel.captionNode);
            int captionStartWidth = layoutManager.getInnerWidth(panel.captionStartDeco);
            int totalMargin = captionWidth + captionStartWidth;
            panel.captionNode.getStyle().setWidth(captionWidth, Unit.PX);
            panel.captionWrap.getStyle().setPaddingLeft(totalMargin, Unit.PX);
            // Pull the start decoration left by the full caption width.
            panel.captionStartDeco.getStyle().setMarginLeft(0 - totalMargin, Unit.PX);
        }
        Profiler.enter("CubaGroupBoxConnector.layout getHeights");
        // Haulmont API get max height of caption components
        int top = layoutManager.getOuterHeight(panel.captionNode);
        top = Math.max(layoutManager.getOuterHeight(panel.captionStartDeco), top);
        top = Math.max(layoutManager.getOuterHeight(panel.captionEndDeco), top);
        int bottom = layoutManager.getInnerHeight(panel.bottomDecoration);
        Profiler.leave("PanelConnector.layout getHeights");
        Style style = panel.getElement().getStyle();
        int paddingTop = 0;
        int paddingBottom = 0;
        if (panel.hasAnyOuterMargin()) {
            Profiler.enter("PanelConnector.layout get values from styles");
            // Clear previously set values
            style.clearPaddingTop();
            style.clearPaddingBottom();
            // Calculate padding from styles
            ComputedStyle computedStyle = new ComputedStyle(panel.getElement());
            paddingTop = computedStyle.getIntProperty("padding-top");
            paddingBottom = computedStyle.getIntProperty("padding-bottom");
            Profiler.leave("PanelConnector.layout get values from styles");
        }
        Profiler.enter("PanelConnector.layout modify style");
        // Negative margins lift the caption/bottom decoration into the
        // padding area reserved on the panel element below.
        panel.captionWrap.getStyle().setMarginTop(-top, Style.Unit.PX);
        panel.bottomDecoration.getStyle().setMarginBottom(-bottom, Style.Unit.PX);
        style.setPaddingTop(top + paddingTop, Style.Unit.PX);
        style.setPaddingBottom(bottom + paddingBottom, Style.Unit.PX);
        Profiler.leave("PanelConnector.layout modify style");
        // Update scroll positions
        Profiler.enter("PanelConnector.layout update scroll positions");
        panel.contentNode.setScrollTop(panel.scrollTop);
        panel.contentNode.setScrollLeft(panel.scrollLeft);
        Profiler.leave("PanelConnector.layout update scroll positions");
        // Read actual value back to ensure update logic is correct
        Profiler.enter("PanelConnector.layout read scroll positions");
        panel.scrollTop = panel.contentNode.getScrollTop();
        panel.scrollLeft = panel.contentNode.getScrollLeft();
        Profiler.leave("PanelConnector.layout read scroll positions");
    }

    @Override
    public void onStateChanged(StateChangeEvent stateChangeEvent) {
        super.onStateChanged(stateChangeEvent);
        CubaGroupBoxWidget widget = getWidget();
        // Lazy one-time widget setup; in group-box mode also register the
        // caption parts so the LayoutManager re-measures them for us.
        if (!widgetInitialized) {
            widget.init();
            if (!getState().showAsPanel) {
                LayoutManager layoutManager = getLayoutManager();
                layoutManager.registerDependency(this, widget.captionStartDeco);
                layoutManager.registerDependency(this, widget.captionEndDeco);
                layoutManager.registerDependency(this, widget.captionTextNode);
            }
            widgetInitialized = true;
        }
        widget.setCollapsable(getState().collapsable);
        widget.setExpanded(getState().expanded);
        widget.setShowAsPanel(getState().showAsPanel);
        if (!getState().showAsPanel) {
            widget.setOuterMargin(new MarginInfo(getState().outerMarginsBitmask));
        }
        if (stateChangeEvent.hasPropertyChanged("caption")) {
            // Caption text changed: force a re-measure of the caption node.
            updateCaptionNodeWidth(widget);
        }
        if (isContextHelpIconEnabled(getState())) {
            if (getWidget().contextHelpIcon == null) {
                // First time the icon is needed: create the "?" span, attach
                // it to the caption, and sink tooltip + click events on it.
                getWidget().contextHelpIcon = DOM.createSpan();
                getWidget().contextHelpIcon.setInnerHTML("?");
                getWidget().contextHelpIcon.setClassName(CONTEXT_HELP_CLASSNAME);
                if (hasContextHelpIconListeners(getState())) {
                    getWidget().contextHelpIcon.addClassName(CONTEXT_HELP_CLICKABLE_CLASSNAME);
                }
                // Hide the icon from screen readers; it is a visual hint only.
                Roles.getTextboxRole().setAriaHiddenState(getWidget().contextHelpIcon, true);
                getWidget().captionNode.appendChild(getWidget().contextHelpIcon);
                DOM.sinkEvents(getWidget().contextHelpIcon, VTooltip.TOOLTIP_EVENTS | Event.ONCLICK);
                getWidget().contextHelpClickHandler = this::contextHelpIconClick;
            } else {
                // Icon exists but may have been hidden earlier: show it again.
                getWidget().contextHelpIcon.getStyle().clearDisplay();
                updateCaptionNodeWidth(widget);
            }
        } else if (getWidget().contextHelpIcon != null) {
            // Help disabled: keep the element but hide it.
            getWidget().contextHelpIcon.getStyle().setDisplay(Style.Display.NONE);
            updateCaptionNodeWidth(widget);
        }
    }

    /** Clears the fixed caption width and schedules a re-measure. */
    protected void updateCaptionNodeWidth(CubaGroupBoxWidget widget) {
        widget.captionNode.getStyle().clearWidth();
        getLayoutManager().setNeedsMeasure(this);
    }

    /**
     * @return true when either caption decoration is visible, i.e. the group
     *         box is rendered with a border that the caption overlaps
     */
    protected boolean isBordersVisible() {
        CubaGroupBoxWidget panel = getWidget();
        return panel.captionStartDeco.getOffsetWidth() > 0 || panel.captionEndDeco.getOffsetWidth() > 0;
    }
}
|
dimone-kun/cuba
|
modules/web-toolkit/src/com/haulmont/cuba/web/widgets/client/groupbox/CubaGroupBoxConnector.java
|
Java
|
apache-2.0
| 9,055
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_03) on Mon Nov 19 21:41:13 CET 2007 -->
<TITLE>
org.springframework.web.servlet.view.document Class Hierarchy (Spring Framework API 2.5)
</TITLE>
<META NAME="date" CONTENT="2007-11-19">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.springframework.web.servlet.view.document Class Hierarchy (Spring Framework API 2.5)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
<a href="http://www.springframework.org/" target="_top">The Spring Framework</a></EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../org/springframework/web/servlet/view/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../../../org/springframework/web/servlet/view/freemarker/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/springframework/web/servlet/view/document/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
Hierarchy For Package org.springframework.web.servlet.view.document
</H2>
</CENTER>
<DL>
<DT><B>Package Hierarchies:</B><DD><A HREF="../../../../../../overview-tree.html">All Packages</A></DL>
<HR>
<H2>
Class Hierarchy
</H2>
<UL>
<LI TYPE="circle">java.lang.<A HREF="http://java.sun.com/javase/6/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang"><B>Object</B></A><UL>
<LI TYPE="circle">org.springframework.context.support.<A HREF="../../../../../../org/springframework/context/support/ApplicationObjectSupport.html" title="class in org.springframework.context.support"><B>ApplicationObjectSupport</B></A> (implements org.springframework.context.<A HREF="../../../../../../org/springframework/context/ApplicationContextAware.html" title="interface in org.springframework.context">ApplicationContextAware</A>)
<UL>
<LI TYPE="circle">org.springframework.web.context.support.<A HREF="../../../../../../org/springframework/web/context/support/WebApplicationObjectSupport.html" title="class in org.springframework.web.context.support"><B>WebApplicationObjectSupport</B></A> (implements org.springframework.web.context.<A HREF="../../../../../../org/springframework/web/context/ServletContextAware.html" title="interface in org.springframework.web.context">ServletContextAware</A>)
<UL>
<LI TYPE="circle">org.springframework.web.servlet.view.<A HREF="../../../../../../org/springframework/web/servlet/view/AbstractView.html" title="class in org.springframework.web.servlet.view"><B>AbstractView</B></A> (implements org.springframework.beans.factory.<A HREF="../../../../../../org/springframework/beans/factory/BeanNameAware.html" title="interface in org.springframework.beans.factory">BeanNameAware</A>, org.springframework.web.servlet.<A HREF="../../../../../../org/springframework/web/servlet/View.html" title="interface in org.springframework.web.servlet">View</A>)
<UL>
<LI TYPE="circle">org.springframework.web.servlet.view.document.<A HREF="../../../../../../org/springframework/web/servlet/view/document/AbstractExcelView.html" title="class in org.springframework.web.servlet.view.document"><B>AbstractExcelView</B></A><LI TYPE="circle">org.springframework.web.servlet.view.document.<A HREF="../../../../../../org/springframework/web/servlet/view/document/AbstractJExcelView.html" title="class in org.springframework.web.servlet.view.document"><B>AbstractJExcelView</B></A><LI TYPE="circle">org.springframework.web.servlet.view.document.<A HREF="../../../../../../org/springframework/web/servlet/view/document/AbstractPdfView.html" title="class in org.springframework.web.servlet.view.document"><B>AbstractPdfView</B></A></UL>
</UL>
</UL>
</UL>
</UL>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
<a href="http://www.springframework.org/" target="_top">The Spring Framework</a></EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../org/springframework/web/servlet/view/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../../../org/springframework/web/servlet/view/freemarker/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/springframework/web/servlet/view/document/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
<i>Copyright © 2002-2007 <a href=http://www.springframework.org/ target=_top>The Spring Framework</a>.</i>
</BODY>
</HTML>
|
mattxia/spring-2.5-analysis
|
docs/api/org/springframework/web/servlet/view/document/package-tree.html
|
HTML
|
apache-2.0
| 8,931
|
# ラズパイ x ソラコムキャンペーン 植物観察キット 〜後片付けマニュアル〜
## <a name="overview">はじめに</a>
植物観察キットのハンズオンお疲れ様でした!このドキュメントでは、植物観察ハンズオンを終了する際の手順を説明します。
ハンズオンキットを継続的に使う予定がない方は、このドキュメントに沿って設定の削除をお願いします。このハンズオンで行ったプログラムの設定や、ソラコムサービスの設定をクリアすることで、不要な通信と料金の発生を防ぎます。
**SORACOM サービスの設定無効化**
ハンズオンで使った各種サービスを無効化し、課金を停止します。
- [各種サービスの設定を無効化](#clear_service_settings)
**Raspberry Piの設定削除**
Raspberry Piの設定を削除し、定期的なデータアップロードや自動接続を無効化します。
- OSイメージを初期化して問題ない場合→[「方法1: Raspberry PiのOSイメージを初期化する」](#reset_os) へ。
- OSイメージを初期化したくない場合→[方法2: データの自動送信・自動的に3G接続する設定を削除する](#clear_os_settings) へ。
**SIMの休止・解約**
★ハンズオンキットのSIMは、USBドングルだけでなくSIMフリーのスマートフォン等、他のデバイスでも利用可能です。よろしければ、ぜひ継続して使ってみてください。
しかし、SIMをしばらく使わない・今後使う予定が無い方は、SORACOMサービスの設定無効化・Raspberry Piの後片付けと合わせて、こちらの手順も実行ください。SIMの設定を変更し、通信できないようにします。
- SIMをしばらく使わないが、後で使いたい場合:[方法1: SIMを「休止中」状態にセットする](#inactive_sim)
- SIMを今後使う予定がない場合:[方法2: SIMを解約する](#terminate_sim)
## <a name="clear_service_settings">SORACOM サービスの設定無効化</a>
### Beamの設定を削除する
アカウント作成後1年間は無償枠で決められたリクエスト数は無料でご利用になれますが、今後使わないのであればGroupの設定でBeamの設定を無効化しましょう。
ユーザコンソールのグループ設定画面を開き、「SORACOM Beam設定」をクリックします。続いて「ー」マークのボタンをクリックして、さらに設定の右側に表示される「削除」をクリックしてください。これで設定の削除完了です。

### Endorseの設定を無効化する
アカウント作成後1年間はSIM 1枚までは無償枠で無料で利用できますが、念のためGroupの設定でEndorseの設定をOFFにしておきましょう。
ユーザコンソールのグループ設定画面を開き、「SORACOM Endorse設定」をクリックします。設定のスイッチを「OFF」にセットし、「保存」ボタンをクリックしてください。これで設定の無効化完了です。

## <a name="reset_raspberry_pi">Raspberry Piの設定削除</a>
### <a name="reset_os">方法1: Raspberry PiのOSイメージを初期化する</a>
Raspberry Pi からSDカードを取り外し、[Raspberry Piのセットアップ手順](https://github.com/soracom/handson/blob/master/setup/setup.md#section2) に沿ってSDカードにOSイメージを書き込んでください。
これでSDカードのデータが新しいOSイメージで上書きされ、OSはクリーンな状態に戻ります。
### <a name="clear_os_settings">方法2: データの自動送信・3G接続の自動実行の設定を削除する</a>
#### データの自動送信の設定を削除する
データの自動送信を行っているcrontabの設定を削除します。
まず、Raspberry Pi にSSHログインし、以下のコマンドを実行してください。これでcrontabの設定がすべて削除されます。
```
pi@raspberrypi:~ $ crontab -r
```
続いて、次のコマンドを実行してcrontabの内容を表示し、設定が削除されていることを確認します。```no crontab for pi```と表示されれば設定の削除完了です。
```
pi@raspberrypi:~ $ crontab -l
no crontab for pi (← このように表示されたら削除完了です。)
```
#### 自動的に3G接続する設定を削除する
Raspberry Pi 起動のタイミングでconnect_air.shを自動実行する設定(USBドングルによる自動接続)を削除します。
```
pi@raspberrypi:~ $ sudo nano /etc/rc.local (←/etc/rc.local の編集画面を開きます)
```
* 編集画面が開いたら、 ```/usr/local/sbin/connect_air.sh &``` と書かれている行を削除します。
* 削除できたら[Ctrl+O]を押し、さらに[Enter]を押して保存します。
* 保存できたら[Ctrl+X]でnanoを閉じて、設定の削除完了です。
## <a name="sim">SIMの休止・解約</a>
SIMの通信は、SIMのステータスを休止・解約することで通信を止めることができます。どちらにするかは、今後の利用予定に合わせて選択ください。
### <a name="inactive_sim">SIMの休止</a>
そのSIMでは通信できない状態となりますが、契約は維持され、**1日10円の基本料金が発生** します。再びSIMを使いたくなったら「使用開始」ステータスにすることで利用再開できます。
SIMを休止中にセットするには、ユーザコンソールにて、休止したいSIMにチェックマークを入れ、「詳細」をクリックします。続いて、「休止」を選択すると、SIMの状態が「休止中」となります。

### <a name="terminate_sim">SIMの解約</a>
SIM自体の契約を解約をします。解約以降は、基本料金・データ通信料金ともに停止します。ただし、**一度「解約」を行うと二度とそのSIMを利用することができません。** 再びAir SIMが必要となった際には、新しいSIMをご購入ください。
SIMを解約するには、ユーザコンソールにて、解約したいAir SIM にチェックを入れ[操作] ボタンをクリックします。続いて[解約] をクリックします。

確認画面が表示されたら、「解約プロテクション」を「OFF」にセットし[解約する] ボタンをクリックすると、SIMの解約完了です。

---
これでハンズオンの後片付けは終わりです。ハンズオンでお伝えした内容が皆さんのお役に立てば幸いです。
|
soracom/handson
|
plant-observation/clean-up.md
|
Markdown
|
apache-2.0
| 6,924
|
package main
import (
"github.com/gopher-net/gnet-ctl/Godeps/_workspace/src/github.com/Sirupsen/logrus"
)
// Shared structured logger for this example.
var log = logrus.New()

func init() {
	// Demonstrate swapping formatters; the last assignment wins, so the
	// example actually runs with the default text formatter.
	log.Formatter = new(logrus.JSONFormatter)
	log.Formatter = new(logrus.TextFormatter) // default
}

func main() {
	// Catch the Panic below and turn it into a Fatal log entry.
	defer func() {
		err := recover()
		if err == nil {
			return
		}
		log.WithFields(logrus.Fields{
			"number": 100,
			"omg":    true,
			"err":    err,
		}).Fatal("The ice breaks!")
	}()

	log.WithFields(logrus.Fields{"animal": "walrus", "size": 10}).
		Info("A group of walrus emerges from the ocean")

	log.WithFields(logrus.Fields{"omg": true, "number": 122}).
		Warn("The group's number increased tremendously!")

	log.WithFields(logrus.Fields{"animal": "orca", "size": 9009}).
		Panic("It's over 9000!")
}
|
gopher-net/gnet-ctl
|
Godeps/_workspace/src/github.com/Sirupsen/logrus/examples/basic/basic.go
|
GO
|
apache-2.0
| 791
|
namespace Climbing.Web.Tests.Unit
{
using System;
using Climbing.Web.Utilities.Mapper;
using Xunit;
    /// <summary>
    /// Unit tests for the project's <c>AutoMapper</c> utility: verifies that
    /// expression-based mappings (field-to-field, property-to-property, and a
    /// computed/composite expression) are applied by <c>AutoMapper.Map</c>.
    /// </summary>
    public class AutoMapperTests
    {
        [Theory]
        [AutoMoqData]
        public void ShouldMap(ClassFrom from)
        {
            // Arrange
            // Register three mappings on the (static) mapper:
            // a field -> field, a property -> property, and a composed
            // string expression -> property.
            AutoMapper.Setup((ClassFrom cf) => cf.Field).To((ClassTo ct) => ct.FieldTo);
            AutoMapper.Setup((ClassFrom cf) => cf.Property).To((ClassTo ct) => ct.Property1);
            AutoMapper.Setup((ClassFrom cf) => $"{cf.Field}|{cf.Property}").To((ClassTo ct) => ct.Property2);
            // Act
            var res = AutoMapper.Map<ClassTo>(from);
            // Assert
            // Each destination member must carry the value produced by the
            // corresponding source expression.
            Assert.Equal(from.Field, res.FieldTo);
            Assert.Equal(from.Property, res.Property1);
            Assert.Equal($"{from.Field}|{from.Property}", res.Property2);
        }

        /// <summary>Source type: one internal field and one property.</summary>
        public sealed class ClassFrom
        {
#pragma warning disable SA1401 // Fields should be private
            internal string Field = Guid.NewGuid().ToString();
#pragma warning restore SA1401 // Fields should be private
            public string Property { get; set; }
        }

        /// <summary>Destination type: a public field plus two properties.</summary>
        public sealed class ClassTo
        {
#pragma warning disable SA1401 // Fields should be private
            // Pre-initialized to prove the mapper overwrites existing values.
            public string FieldTo = "11";
#pragma warning restore SA1401 // Fields should be private
            public string Property1 { get; set; }
            public string Property2 { get; set; }
        }
    }
}
|
ivankaurov/ClimbingCompetitionWeb
|
WebClimbingNew/Tests.Unit/AutoMapperTests.cs
|
C#
|
apache-2.0
| 1,466
|
/**
* Copyright 2009-2012 Ibrahim Chaehoi
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.jawr.web.resource.handler.reader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import javax.servlet.ServletContext;
import net.jawr.web.JawrConstant;
import net.jawr.web.config.JawrConfig;
import net.jawr.web.exception.ResourceNotFoundException;
import net.jawr.web.resource.FileNameUtils;
import net.jawr.web.resource.bundle.generator.BaseResourceGenerator;
import net.jawr.web.resource.bundle.generator.GeneratorRegistry;
import net.jawr.web.resource.handler.reader.grails.GrailsServletContextResourceReader;
import net.jawr.web.servlet.util.ImageMIMETypesSupport;
import net.jawr.web.util.StringUtils;
import org.apache.log4j.Logger;
/**
 * This class defines the manager for resource reader.
 * <p>
 * It aggregates the configured text and stream resource readers, resolves a
 * resource by iterating over them in priority order (first reader returning a
 * non-null result wins), and exposes browsing operations (resource listing,
 * directory checks) through the registered {@link ResourceBrowser} instances.
 *
 * @author Ibrahim Chaehoi
 */
public class ServletContextResourceReaderHandler implements ResourceReaderHandler {
	/** The logger */
	private static final Logger LOGGER = Logger.getLogger(ServletContextResourceReaderHandler.class);
	/** The servlet context */
	private ServletContext servletContext;
	/** The working directory (filesystem path used for Jawr temporary data) */
	private String workingDirectory;
	/** The generator registry */
	private GeneratorRegistry generatorRegistry;
	/** The list of resource readers, in lookup priority order */
	private List<TextResourceReader> resourceReaders = new ArrayList<TextResourceReader>();
	/** The list of stream resource readers, in lookup priority order */
	private List<StreamResourceReader> streamResourceReaders = new ArrayList<StreamResourceReader>();
	/** The list of resource info providers (browsers); most recently added first */
	private List<ResourceBrowser> resourceInfoProviders = new ArrayList<ResourceBrowser>();
	/** The allowed file extensions (lower-case); anything else is refused unless generated */
	private List<String> allowedExtensions = new ArrayList<String>();
	/**
	 * Constructor
	 * @param servletContext the servlet context
	 * @param jawrConfig the Jawr config
	 * @param generatorRegistry the generator registry
	 * @throws IOException if an IOException occurs.
	 */
	public ServletContextResourceReaderHandler(ServletContext servletContext,
			JawrConfig jawrConfig, GeneratorRegistry generatorRegistry) throws IOException {
		// Default working directory is the container temp dir; an explicit Jawr
		// working directory takes precedence when bundle mapping is enabled.
		String tempWorkingDirectory = ((File) servletContext.getAttribute(JawrConstant.SERVLET_CONTEXT_TEMPDIR)).getCanonicalPath();
		if(jawrConfig.getUseBundleMapping() && StringUtils.isNotEmpty(jawrConfig.getJawrWorkingDirectory())){
			tempWorkingDirectory = jawrConfig.getJawrWorkingDirectory();
		}
		this.servletContext = servletContext;
		this.generatorRegistry = generatorRegistry;
		this.generatorRegistry.setResourceReaderHandler(this);
		// Strip a leading "file:" scheme so the value can be used as a plain path
		if (tempWorkingDirectory.startsWith(JawrConstant.FILE_URI_PREFIX)) {
			tempWorkingDirectory = tempWorkingDirectory.substring(JawrConstant.FILE_URI_PREFIX.length());
		}
		// add the default extension
		allowedExtensions.addAll(JawrConstant.DEFAULT_RESOURCE_EXTENSIONS);
		if(JawrConstant.IMG_TYPE.equals(jawrConfig.getResourceType())){
			// For image handling, allow every supported image MIME-type extension
			for (Object key : ImageMIMETypesSupport.getSupportedProperties(JawrConfig.class).keySet()) {
				if(!this.allowedExtensions.contains((String) key)){
					this.allowedExtensions.add((String) key);
				}
			}
		}else{
			allowedExtensions.add(jawrConfig.getResourceType());
		}
		this.workingDirectory = tempWorkingDirectory;
		ServletContextResourceReader rd = null;
		// In grails apps, the generator uses a special implementation
		if(null == servletContext.getAttribute(JawrConstant.GRAILS_WAR_DEPLOYED)){
			if(LOGGER.isDebugEnabled()){
				LOGGER.debug("Using standard servlet context resource reader.");
			}
			rd = new ServletContextResourceReader(servletContext, jawrConfig);
		}else{
			if(LOGGER.isDebugEnabled()){
				LOGGER.debug("Using grails context resource reader.");
			}
			rd = new GrailsServletContextResourceReader(servletContext, jawrConfig);
		}
		addResourceReaderToEnd(rd);
		// Add FileSystemResourceReader if needed; its priority relative to the
		// servlet-context reader is controlled by the "high priority" property.
		String baseContextDir = jawrConfig.getProperty(JawrConstant.JAWR_BASECONTEXT_DIRECTORY);
		if(StringUtils.isNotEmpty(baseContextDir)){
			ResourceReader fileRd = new FileSystemResourceReader(jawrConfig);
			if(LOGGER.isDebugEnabled()){
				LOGGER.debug("The base directory context is set to "+baseContextDir);
			}
			boolean baseContextDirHighPriority = Boolean.valueOf(jawrConfig.getProperty(JawrConstant.JAWR_BASECONTEXT_DIRECTORY_HIGH_PRIORITY));
			if(baseContextDirHighPriority){
				addResourceReaderToStart(fileRd);
				if(LOGGER.isDebugEnabled()){
					LOGGER.debug("Jawr will search in priority in the base directory context before searching in the war content.");
				}
			}else{
				addResourceReaderToEnd(fileRd);
				if(LOGGER.isDebugEnabled()){
					LOGGER.debug("Jawr will search in priority in the war content before searching in the base directory context.");
				}
			}
		}
	}
	/* (non-Javadoc)
	 * @see net.jawr.web.resource.handler.ResourceReaderHandler#getWorkingDirectory()
	 */
	public String getWorkingDirectory() {
		return this.workingDirectory;
	}
	/* (non-Javadoc)
	 * @see net.jawr.web.resource.handler.WorkingDirectoryLocationAware#setWorkingDirectory(java.lang.String)
	 */
	public void setWorkingDirectory(String workingDir) {
		this.workingDirectory = workingDir;
	}
	/**
	 * Initialize the reader: injects the working directory and servlet context
	 * into aware readers, and registers browsers (newest first).
	 * @param obj the reader to initialize
	 */
	private void initReader(Object obj){
		if(obj instanceof WorkingDirectoryLocationAware){
			((WorkingDirectoryLocationAware) obj).setWorkingDirectory(workingDirectory);
		}
		if(obj instanceof ServletContextAware){
			((ServletContextAware) obj).setServletContext(servletContext);
		}
		if(obj instanceof ResourceBrowser){
			// Inserted at index 0: later-registered browsers are consulted first
			resourceInfoProviders.add(0, (ResourceBrowser)obj);
		}
	}
	/**
	 * Adds the resource reader to the end (lowest priority) of the list of
	 * available resource readers. A reader implementing both text and stream
	 * interfaces is registered in both lists.
	 * @param rd the resource reader
	 */
	public void addResourceReaderToEnd(ResourceReader rd) {
		if(rd instanceof TextResourceReader){
			resourceReaders.add((TextResourceReader) rd);
		}
		if(rd instanceof StreamResourceReader){
			streamResourceReaders.add((StreamResourceReader) rd);
		}
		initReader(rd);
	}
	/**
	 * Adds the resource reader to the start (highest priority) of the list of
	 * available resource readers.
	 * @param rd the resource reader
	 */
	public void addResourceReaderToStart(ResourceReader rd) {
		if(rd instanceof TextResourceReader){
			resourceReaders.add(0, (TextResourceReader) rd);
		}
		if(rd instanceof StreamResourceReader){
			streamResourceReaders.add(0, (StreamResourceReader) rd);
		}
		initReader(rd);
	}
	/* (non-Javadoc)
	 * @see net.jawr.web.resource.handler.reader.ResourceReaderHandler#getResource(java.lang.String)
	 */
	public Reader getResource(String resourceName) throws ResourceNotFoundException{
		return getResource(resourceName, false);
	}
	/* (non-Javadoc)
	 * @see net.jawr.web.resource.handler.ResourceReader#getResource(java.lang.String, boolean)
	 */
	public Reader getResource(String resourceName, boolean processingBundle) throws ResourceNotFoundException {
		return getResource(resourceName, processingBundle, new ArrayList<Class<?>>());
	}
	/**
	 * Returns a Reader for the given resource, trying each registered text
	 * reader in priority order and skipping any reader whose class appears in
	 * {@code excludedReader}. Generator readers are only consulted when their
	 * resolver matches the path. The first non-null result is returned.
	 * @throws ResourceNotFoundException if no reader can provide the resource
	 *         or its extension is not allowed (and the path is not generated)
	 */
	public Reader getResource(String resourceName, boolean processingBundle, List<Class<?>> excludedReader) throws ResourceNotFoundException {
		Reader rd = null;
		String resourceExtension = FileNameUtils.getExtension(resourceName);
		boolean generatedPath = generatorRegistry.isPathGenerated(resourceName);
		if(generatedPath || allowedExtensions.contains(resourceExtension.toLowerCase())){
			for (Iterator<TextResourceReader> iterator = resourceReaders.iterator(); iterator.hasNext();) {
				TextResourceReader rsReader = iterator.next();
				if(!isInstanceOf(rsReader, excludedReader)){
					// A generator-backed reader only applies if its resolver matches the path
					if (!(rsReader instanceof BaseResourceGenerator)
							|| ((BaseResourceGenerator)rsReader).getResolver().matchPath(resourceName))
					{
						rd = rsReader.getResource(resourceName, processingBundle);
						if(rd != null){
							break;
						}
					}
				}
			}
		}else{
			LOGGER.warn("The resource '"+resourceName+"' will not be read as its extension is not an allowed one.");
		}
		if(rd == null){
			throw new ResourceNotFoundException(resourceName);
		}
		return rd;
	}
	/**
	 * Returns true if {@code rd} is an instance of any class in {@code interfaces}.
	 */
	private boolean isInstanceOf(Object rd, List<Class<?>> interfaces){
		boolean result = false;
		for (Class<?> class1 : interfaces) {
			if(class1.isInstance(rd)){
				result = true;
				break;
			}
		}
		return result;
	}
	/* (non-Javadoc)
	 * @see net.jawr.web.resource.handler.ResourceReader#getResourceAsStream(java.lang.String)
	 */
	public InputStream getResourceAsStream(String resourceName) throws ResourceNotFoundException {
		return getResourceAsStream(resourceName, false);
	}
	/**
	 * Binary counterpart of {@link #getResource(String, boolean, List)}:
	 * iterates the stream readers in priority order and returns the first
	 * non-null InputStream. Unlike the text variant, it also lazily loads the
	 * generator for the path and supports no exclusion list.
	 */
	public InputStream getResourceAsStream(String resourceName,
			boolean processingBundle) throws ResourceNotFoundException {
		generatorRegistry.loadGeneratorIfNeeded(resourceName);
		InputStream is = null;
		String resourceExtension = FileNameUtils.getExtension(resourceName);
		boolean generatedPath = generatorRegistry.isPathGenerated(resourceName);
		if(generatedPath || allowedExtensions.contains(resourceExtension.toLowerCase())){
			for (Iterator<StreamResourceReader> iterator = streamResourceReaders.iterator(); iterator.hasNext();) {
				StreamResourceReader rsReader = iterator.next();
				if (!(rsReader instanceof BaseResourceGenerator)
						|| ((BaseResourceGenerator)rsReader).getResolver().matchPath(resourceName)){
					is = rsReader.getResourceAsStream(resourceName);
					if(is != null){
						break;
					}
				}
			}
		}else{
			LOGGER.warn("The resource '"+resourceName+"' will not be read as its extension is not an allowed one.");
		}
		if(is == null){
			throw new ResourceNotFoundException(resourceName);
		}
		return is;
	}
	/**
	 * Lists resource names under a directory. Only the FIRST browser that
	 * applies contributes names (note the break): a matching generator for
	 * generated paths, otherwise the first non-generator browser.
	 * @see net.jawr.web.resource.handler.reader.ResourceBrowser#getResourceNames(java.lang.String)
	 */
	public Set<String> getResourceNames(String dirName) {
		Set<String> resourceNames = new TreeSet<String>();
		for (Iterator<ResourceBrowser> iterator = resourceInfoProviders.iterator(); iterator.hasNext();) {
			ResourceBrowser rsBrowser = iterator.next();
			if(generatorRegistry.isPathGenerated(dirName)){
				if (rsBrowser instanceof BaseResourceGenerator) {
					BaseResourceGenerator rsGeneratorBrowser = (BaseResourceGenerator) rsBrowser;
					if(rsGeneratorBrowser.getResolver().matchPath(dirName)){
						resourceNames.addAll(rsBrowser.getResourceNames(dirName));
						break;
					}
				}
			}else{
				if (!(rsBrowser instanceof BaseResourceGenerator)) {
					resourceNames.addAll(rsBrowser.getResourceNames(dirName));
					break;
				}
			}
		}
		return resourceNames;
	}
	/**
	 * Returns whether the given path denotes a directory, asking each browser
	 * in turn until one answers true (loop stops on the first true result).
	 * @see net.jawr.web.resource.handler.reader.ResourceBrowser#isDirectory(java.lang.String)
	 */
	public boolean isDirectory(String resourceName) {
		boolean result = false;
		for (Iterator<ResourceBrowser> iterator = resourceInfoProviders.iterator(); iterator.hasNext() && !result;) {
			ResourceBrowser rsBrowser = iterator.next();
			if(generatorRegistry.isPathGenerated(resourceName)){
				if (rsBrowser instanceof BaseResourceGenerator) {
					BaseResourceGenerator rsGeneratorBrowser = (BaseResourceGenerator) rsBrowser;
					if(rsGeneratorBrowser.getResolver().matchPath(resourceName)){
						result = rsBrowser.isDirectory(resourceName);
					}
				}
			}else{
				if(!(rsBrowser instanceof BaseResourceGenerator)){
					result = rsBrowser.isDirectory(resourceName);
				}
			}
		}
		return result;
	}
}
|
berinle/jawr-core
|
src/main/java/net/jawr/web/resource/handler/reader/ServletContextResourceReaderHandler.java
|
Java
|
apache-2.0
| 12,798
|
<html>
<head>
<title>PHPXref Explorer</title>
<meta http-equiv="content-type" content="text/html;charset=iso-8859-1">
<script language="JavaScript" type="text/javascript">
window.location='navframe-js.html';
</script>
<link rel="stylesheet" href="sample.css" type="text/css">
<style type="text/css">
</style>
<!-- Fix: the head section was never closed before <body> -->
</head>
<body bgcolor="#ffffff">
<!-- Generated by PHPXref 0.7.1 at Thu Oct 23 19:31:09 2014 -->
<!-- PHPXref (c) 2000-2010 Gareth Watts - gareth@omnipotent.net -->
<!-- http://phpxref.sourceforge.net/ -->
<div id="slist">
<p class="dirsubtitle">Explorer</p>
<ul class="dirlist">
<li><a href="./index.html">/</a></li>
<ul class="dirlist">
<li><a href="application/index.html" target="docview">application</a></li>
<ul class="dirlist">
<li><a href="application/cache/index.html" target="docview">cache</a></li>
<li><a href="application/config/index.html" target="docview">config</a></li>
<li><a href="application/controllers/index.html" target="docview">controllers</a></li>
<li><a href="application/core/index.html" target="docview">core</a></li>
<li><a href="application/errors/index.html" target="docview">errors</a></li>
<li><a href="application/helpers/index.html" target="docview">helpers</a></li>
<li><a href="application/hooks/index.html" target="docview">hooks</a></li>
<li><a href="application/language/index.html" target="docview">language</a></li>
<ul class="dirlist">
<li><a href="application/language/english/index.html" target="docview">english</a></li>
</ul>
<li><a href="application/libraries/index.html" target="docview">libraries</a></li>
<li><a href="application/logs/index.html" target="docview">logs</a></li>
<li><a href="application/models/index.html" target="docview">models</a></li>
<li><a href="application/third_party/index.html" target="docview">third_party</a></li>
<li><a href="application/views/index.html" target="docview">views</a></li>
</ul>
<li><a href="system/index.html" target="docview">system</a></li>
<ul class="dirlist">
<li><a href="system/core/index.html" target="docview">core</a></li>
<li><a href="system/database/index.html" target="docview">database</a></li>
<ul class="dirlist">
<li><a href="system/database/drivers/index.html" target="docview">drivers</a></li>
<ul class="dirlist">
<li><a href="system/database/drivers/cubrid/index.html" target="docview">cubrid</a></li>
<li><a href="system/database/drivers/mssql/index.html" target="docview">mssql</a></li>
<li><a href="system/database/drivers/mysql/index.html" target="docview">mysql</a></li>
<li><a href="system/database/drivers/mysqli/index.html" target="docview">mysqli</a></li>
<li><a href="system/database/drivers/oci8/index.html" target="docview">oci8</a></li>
<li><a href="system/database/drivers/odbc/index.html" target="docview">odbc</a></li>
<li><a href="system/database/drivers/pdo/index.html" target="docview">pdo</a></li>
<li><a href="system/database/drivers/postgre/index.html" target="docview">postgre</a></li>
<li><a href="system/database/drivers/sqlite/index.html" target="docview">sqlite</a></li>
<li><a href="system/database/drivers/sqlsrv/index.html" target="docview">sqlsrv</a></li>
</ul>
</ul>
<li><a href="system/fonts/index.html" target="docview">fonts</a></li>
<li><a href="system/helpers/index.html" target="docview">helpers</a></li>
<li><a href="system/language/index.html" target="docview">language</a></li>
<ul class="dirlist">
<li><a href="system/language/english/index.html" target="docview">english</a></li>
</ul>
<li><a href="system/libraries/index.html" target="docview">libraries</a></li>
<ul class="dirlist">
<li><a href="system/libraries/Cache/index.html" target="docview">Cache</a></li>
<ul class="dirlist">
<li><a href="system/libraries/Cache/drivers/index.html" target="docview">drivers</a></li>
</ul>
<li><a href="system/libraries/javascript/index.html" target="docview">javascript</a></li>
</ul>
</ul>
</ul>
</ul>
</div>
</body></html>
|
inputx/code-ref-doc
|
codeigniter/navframe.html
|
HTML
|
apache-2.0
| 3,953
|
package se.r2m.bigint.casinofront;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import se.r2m.bigint.casinofront.game.GameEngine;
import se.r2m.bigint.casinofront.game.KafkaGameEngine;
/**
 * Spring Boot entry point for the casino front application.
 * <p>
 * Configured via the classic annotation trio (equivalent to
 * {@code @SpringBootApplication}); also declares the {@link GameEngine}
 * bean backed by Kafka.
 */
@Configuration
@EnableAutoConfiguration
@ComponentScan
public class CasinoFrontApplication {

    private static final Logger LOG = LoggerFactory.getLogger(CasinoFrontApplication.class);

    public CasinoFrontApplication() {
        LOG.info("Starting CasinoFrontApplication...");
    }

    /**
     * Boots the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        // The returned ApplicationContext was previously captured only for
        // commented-out bean-inspection debug code; both removed as dead code.
        SpringApplication.run(CasinoFrontApplication.class, args);
    }

    /**
     * Provides the game engine implementation used by the application.
     *
     * @return a Kafka-backed {@link GameEngine}
     */
    @Bean
    public GameEngine getGameEngine() {
        return new KafkaGameEngine();
    }
}
|
r2m/r2m-innovation-bigintegration
|
casino-server-front/src/main/java/se/r2m/bigint/casinofront/CasinoFrontApplication.java
|
Java
|
apache-2.0
| 1,385
|
// Sets the background color of the master UIView (shown when there are no windows/tab groups on it)
Ti.UI.setBackgroundColor('#000');
// NOTE(review): Ti.UI.iPhone.* is iOS-specific; confirm this assignment is harmless on Android builds.
Ti.UI.iPhone.statusBarStyle = Ti.UI.iPhone.StatusBar.OPAQUE_BLACK;
// Create the main app namespace; included files attach their exports to it.
var demo={};
// Small platform/window helpers shared by the app.
demo.myHelpers = {
	// Returns true when running on Android.
	isAndroid : function(){
		return (Ti.Platform.name == 'android');
	},
	// Creates a window from the given options, locked to portrait orientations.
	makeWindow : function(a){
		a = a || {};
		var win = Ti.UI.createWindow(a);
		//Force the orientations
		win.orientationModes = [
			Ti.UI.PORTRAIT,
			Ti.UI.UPSIDE_PORTRAIT
		];
		return win;
	}
};
// Bring in the child windows (main.js presumably defines demo.launchMainWindow — verify)
Ti.include('main.js','picker.js');
// Bring in the color picker functions (CommonJS format)
demo.colorHelper = require('colorhelper');
// Window options shared by the main launch window; title is localized.
var winConfig = {
	title:Ti.Locale.getString('sample_win_title'),
	barColor:'#000',
	backgroundColor:'#fff',
	tabBarHidden:true,
	fullscreen:false,
	navBarHidden : (Ti.Platform.name == 'android')
};
// Create the main launch window
demo.mainWindow = demo.launchMainWindow(winConfig);
// Guard: launchMainWindow must return the window (a common sample mistake is forgetting the return)
if(demo.mainWindow===null){
	alert(Ti.Locale.getString('forgot_return'));
}
// App-wide event: picker broadcasts a color and the main window recolors itself.
Ti.App.addEventListener('color:update_color', function(e){
	demo.mainWindow.backgroundColor=e.selectedColor;
});
// Based on platform, launch the app in a specific way:
// iOS uses a tab group; Android opens the window directly.
if(Ti.Platform.name!=='android'){
	var tabGroup = Ti.UI.createTabGroup();
	var tab1 = Ti.UI.createTab({window:demo.mainWindow});
	tabGroup.addTab(tab1);
	// open tab group
	tabGroup.open();
}else{
	demo.mainWindow.open();
}
|
benbahrenburg/TiColorPickerExample
|
Resources/app.js
|
JavaScript
|
apache-2.0
| 1,598
|
/* ----------------------------------------------------------------------- *//**
*
* @file model.hpp
*
* This file contians classes of coefficients (or model), which usually has
* fields that maps to transition states for user-defined aggregates.
* The necessity of these wrappers is to allow classes in algo/ and task/ to
* have a type that they can template over.
*
*//* ----------------------------------------------------------------------- */
#ifndef MADLIB_MODULES_CONVEX_TYPE_MODEL_HPP_
#define MADLIB_MODULES_CONVEX_TYPE_MODEL_HPP_
#include <modules/shared/HandleTraits.hpp>
namespace madlib {
namespace modules {
namespace convex {
/**
 * Low-rank matrix factorization model: the coefficient matrices U and V
 * whose product approximates the data matrix. Templated over Handle so the
 * same type works for both mutable and immutable transition states.
 */
template <class Handle>
struct LMFModel {
    typename HandleTraits<Handle>::MatrixTransparentHandleMap matrixU;
    typename HandleTraits<Handle>::MatrixTransparentHandleMap matrixV;

    /**
     * @brief Space needed.
     *
     * Extra information besides the values in the matrix, like dimension is
     * necessary for a matrix, so that it can perform operations. These are
     * stored in the HandleMap.
     */
    static inline uint32_t arraySize(const int32_t inRowDim,
            const int32_t inColDim, const int32_t inMaxRank) {
        // One (rowDim x maxRank) matrix plus one (colDim x maxRank) matrix.
        return (inRowDim + inColDim) * inMaxRank;
    }

    /**
     * @brief Initialize the model randomly with a user-provided scale factor
     *
     * Each entry of U and V is drawn uniformly from [0, inScaleFactor]
     * (rng() normalized by the generator's range).
     */
    void initialize(const double &inScaleFactor) {
        // using madlib::dbconnector::$database::NativeRandomNumberGenerator
        NativeRandomNumberGenerator rng;
        int i, j, rr;
        double base = rng.min();
        double span = rng.max() - base;
        for (rr = 0; rr < matrixU.cols(); rr ++) {
            for (i = 0; i < matrixU.rows(); i ++) {
                matrixU(i, rr) = inScaleFactor * (rng() - base) / span;
            }
        }
        for (rr = 0; rr < matrixV.cols(); rr ++) {
            for (j = 0; j < matrixV.rows(); j ++) {
                matrixV(j, rr) = inScaleFactor * (rng() - base) / span;
            }
        }
    }

    /*
     * Some operator wrappers for two matrices. Each applies the operation
     * element-wise to both U and V and returns *this for chaining.
     */
    LMFModel &operator*=(const double &c) {
        matrixU *= c;
        matrixV *= c;
        return *this;
    }

    template<class OtherHandle>
    LMFModel &operator-=(const LMFModel<OtherHandle> &inOtherModel) {
        matrixU -= inOtherModel.matrixU;
        matrixV -= inOtherModel.matrixV;
        return *this;
    }

    template<class OtherHandle>
    LMFModel &operator+=(const LMFModel<OtherHandle> &inOtherModel) {
        matrixU += inOtherModel.matrixU;
        matrixV += inOtherModel.matrixV;
        return *this;
    }

    template<class OtherHandle>
    LMFModel &operator=(const LMFModel<OtherHandle> &inOtherModel) {
        matrixU = inOtherModel.matrixU;
        matrixV = inOtherModel.matrixV;
        return *this;
    }
};
// Generalized linear models and SVMs need only a flat coefficient vector,
// so both model types alias the same column-vector handle map.
typedef HandleTraits<MutableArrayHandle<double> >::ColumnVectorTransparentHandleMap
    GLMModel;
typedef HandleTraits<MutableArrayHandle<double> >::ColumnVectorTransparentHandleMap
    SVMModel;
// The necessity of this wrapper is to allow classes in algo/ and task/ to
// have a type that they can template over.
// Multi-layer perceptron model: per-layer weight matrices u[k] of shape
// (n[k] + 1) x n[k+1]; the extra row carries bias terms.
template <class Handle>
struct MLPModel {
    // 1.0 for classification, 0.0 for regression (stored as double in the state)
    typename HandleTraits<Handle>::ReferenceToDouble is_classification;
    // Encodes the activation function choice (numeric code; see callers)
    typename HandleTraits<Handle>::ReferenceToDouble activation;
    // std::vector<Eigen::Map<Matrix > > u;
    std::vector<MutableMappedMatrix> u;

    /**
     * @brief Space needed.
     *
     * Extra information besides the values in the matrix, like dimension is
     * necessary for a matrix, so that it can perform operations. These are
     * stored in the HandleMap.
     *
     * Note: inNumbersOfUnits must have inNumberOfStages + 1 entries
     * (the loop reads n[k+1] for every stage k).
     */
    static inline uint32_t arraySize(const uint16_t &inNumberOfStages,
                                     const double *inNumbersOfUnits) {
        // inNumberOfStages == 0 is not an expected value, but
        // it won't cause exception -- returning 0
        uint32_t size = 0;
        size_t N = inNumberOfStages;
        const double *n = inNumbersOfUnits;
        size_t k;
        for (k = 0; k < N; k ++) {
            size += (n[k] + 1) * (n[k+1]);
        }
        return size; // weights (u)
    }

    /**
     * Rebinds the model views onto an externally owned buffer.
     * @return the total number of doubles consumed for the weights
     */
    uint32_t rebind(const double *is_classification_in,
                    const double *activation_in,
                    const double *data,
                    const uint16_t &inNumberOfStages,
                    const double *inNumbersOfUnits) {
        size_t N = inNumberOfStages;
        const double *n = inNumbersOfUnits;
        size_t k;
        is_classification.rebind(is_classification_in);
        activation.rebind(activation_in);
        uint32_t sizeOfU = 0;
        u.clear();
        for (k = 0; k < N; k ++) {
            // u.push_back(Eigen::Map<Matrix >(
            //     const_cast<double*>(data + sizeOfU),
            //     n[k] + 1, n[k+1]));
            // Map each layer's weights in-place over the shared buffer.
            u.push_back(MutableMappedMatrix());
            u[k].rebind(const_cast<double *>(data + sizeOfU), n[k] + 1, n[k+1]);
            sizeOfU += (n[k] + 1) * (n[k+1]);
        }
        return sizeOfU;
    }

    // Randomly initializes all layer weights.
    void initialize(const uint16_t &inNumberOfStages,
                    const double *inNumbersOfUnits){
        size_t N = inNumberOfStages;
        const double *n = inNumbersOfUnits;
        size_t k;
        double span;
        for (k =0; k < N; ++k){
            // Initalize according to Glorot and Bengio (2010)
            // See design doc for more info
            span = sqrt(6.0 / (n[k] + n[k+1]));
            u[k] << span * Matrix::Random(u[k].rows(), u[k].cols());
        }
    }

    // Frobenius-style norm over all weights, excluding the first row of each
    // matrix (the bias row, given the (n[k]+1) x n[k+1] layout above).
    double norm() const {
        double norm = 0.;
        size_t k;
        for (k = 0; k < u.size(); k ++) {
            norm += u[k].bottomRows(u[k].rows()-1).squaredNorm();
        }
        return std::sqrt(norm);
    }

    // Zeroes every layer's weights (bias rows included).
    void setZero(){
        size_t k;
        for (k = 0; k < u.size(); k ++) {
            u[k].setZero();
        }
    }

    /*
     * Some operator wrappers for u.
     */
    MLPModel& operator*=(const double &c) {
        // Note that when scaling the model, you should
        // not update the bias.
        // NOTE(review): despite the comment above, `u[k] *= c` scales the
        // entire matrix INCLUDING the bias row — confirm whether this is
        // intentional before changing it.
        size_t k;
        for (k = 0; k < u.size(); k ++) {
            u[k] *= c;
        }
        return *this;
    }

    template<class OtherHandle>
    MLPModel& operator-=(const MLPModel<OtherHandle> &inOtherModel) {
        size_t k;
        for (k = 0; k < u.size() && k < inOtherModel.u.size(); k ++) {
            u[k] -= inOtherModel.u[k];
        }
        return *this;
    }

    template<class OtherHandle>
    MLPModel& operator+=(const MLPModel<OtherHandle> &inOtherModel) {
        size_t k;
        for (k = 0; k < u.size() && k < inOtherModel.u.size(); k ++) {
            u[k] += inOtherModel.u[k];
        }
        return *this;
    }

    template<class OtherHandle>
    MLPModel& operator=(const MLPModel<OtherHandle> &inOtherModel) {
        size_t k;
        for (k = 0; k < u.size() && k < inOtherModel.u.size(); k ++) {
            u[k] = inOtherModel.u[k];
        }
        is_classification = inOtherModel.is_classification;
        activation = inOtherModel.activation;
        return *this;
    }
};
} // namespace convex
} // namespace modules
} // namespace madlib
#endif
|
iyerr3/incubator-madlib
|
src/modules/convex/type/model.hpp
|
C++
|
apache-2.0
| 7,228
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.transport.socket.nio;
import java.net.Socket;
import org.apache.mina.common.IoSessionConfig;
/**
 * An {@link IoSessionConfig} for socket transport type.
 * <p>
 * Each getter/setter pair mirrors the corresponding standard socket option
 * on {@link Socket} (SO_REUSEADDR, SO_RCVBUF, SO_SNDBUF, IP traffic class,
 * SO_KEEPALIVE, OOB inline, SO_LINGER, TCP_NODELAY).
 *
 * @author The Apache Directory Project (mina-dev@directory.apache.org)
 * @version $Rev$, $Date$
 */
public interface SocketSessionConfig extends IoSessionConfig {
    /**
     * Returns whether <tt>SO_REUSEADDR</tt> is enabled.
     *
     * @see Socket#getReuseAddress()
     */
    boolean isReuseAddress();

    /**
     * Enables or disables <tt>SO_REUSEADDR</tt>.
     *
     * @see Socket#setReuseAddress(boolean)
     */
    void setReuseAddress(boolean reuseAddress);

    /**
     * Returns the <tt>SO_RCVBUF</tt> size in bytes.
     *
     * @see Socket#getReceiveBufferSize()
     */
    int getReceiveBufferSize();

    /**
     * Sets the <tt>SO_RCVBUF</tt> size in bytes.
     *
     * @see Socket#setReceiveBufferSize(int)
     */
    void setReceiveBufferSize(int receiveBufferSize);

    /**
     * Returns the <tt>SO_SNDBUF</tt> size in bytes.
     *
     * @see Socket#getSendBufferSize()
     */
    int getSendBufferSize();

    /**
     * Sets the <tt>SO_SNDBUF</tt> size in bytes.
     *
     * @see Socket#setSendBufferSize(int)
     */
    void setSendBufferSize(int sendBufferSize);

    /**
     * Returns the traffic class set in the IP header.
     *
     * @see Socket#getTrafficClass()
     */
    int getTrafficClass();

    /**
     * Sets the traffic class in the IP header.
     *
     * @see Socket#setTrafficClass(int)
     */
    void setTrafficClass(int trafficClass);

    /**
     * Returns whether <tt>SO_KEEPALIVE</tt> is enabled.
     *
     * @see Socket#getKeepAlive()
     */
    boolean isKeepAlive();

    /**
     * Enables or disables <tt>SO_KEEPALIVE</tt>.
     *
     * @see Socket#setKeepAlive(boolean)
     */
    void setKeepAlive(boolean keepAlive);

    /**
     * Returns whether urgent (out-of-band) data is received inline.
     *
     * @see Socket#getOOBInline()
     */
    boolean isOobInline();

    /**
     * Enables or disables inline reception of urgent (out-of-band) data.
     *
     * @see Socket#setOOBInline(boolean)
     */
    void setOobInline(boolean oobInline);

    /**
     * Please note that enabling <tt>SO_LINGER</tt> in Java NIO can result
     * in platform-dependent behavior and unexpected blocking of I/O thread.
     *
     * @see Socket#getSoLinger()
     * @see <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6179351">Sun Bug Database</a>
     */
    int getSoLinger();

    /**
     * Please note that enabling <tt>SO_LINGER</tt> in Java NIO will result
     * in platform-dependent behavior and unexpected blocking of I/O thread.
     *
     * @param soLinger Please specify a negative value to disable <tt>SO_LINGER</tt>.
     *
     * @see Socket#setSoLinger(boolean, int)
     * @see <a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6179351">Sun Bug Database</a>
     */
    void setSoLinger(int soLinger);

    /**
     * Returns whether <tt>TCP_NODELAY</tt> (Nagle's algorithm disabled) is set.
     *
     * @see Socket#getTcpNoDelay()
     */
    boolean isTcpNoDelay();

    /**
     * Enables or disables <tt>TCP_NODELAY</tt>.
     *
     * @see Socket#setTcpNoDelay(boolean)
     */
    void setTcpNoDelay(boolean tcpNoDelay);
}
|
sarvex/MINA
|
core/src/main/java/org/apache/mina/transport/socket/nio/SocketSessionConfig.java
|
Java
|
apache-2.0
| 3,285
|
<?php
/**
 * Rate Controller
 *
 * JSON endpoints for managing per-city bank rates (add / update / delete).
 * Every action echoes a JSON payload and terminates the request.
 */
class RateController extends Controller {

	/**
	 * Add a new rate to the bank rate table.
	 *
	 * Request params: "city" (name), "state" (2-letter code), "rate" (int).
	 * Creates a City row plus its associated BankRate row and echoes the
	 * new record as JSON.
	 *
	 * NOTE(review): validation failures use HTTP 500; 400 would be more
	 * appropriate for client errors, but the codes are kept unchanged for
	 * backward compatibility with existing API consumers.
	 *
	 * @throws CHttpException if validation or persistence fails
	 */
	public function actionAdd() {
		// Validate in the original order: rate, then city, then state.
		$rate = intval($this->request->getParam('rate'));
		if(!$rate) {
			throw new CHttpException(500, "Sorry, You must provide the rate amount.");
		}
		$city = $this->requireCityParam();
		$state = trim(strtoupper($this->request->getParam('state')));
		if(!$state || !State::model()->find('state_code=:state', array(':state' => $state))) {
			throw new CHttpException(500, "Sorry, You must provide a valid state code.");
		}
		// Add new city
		$cityRecord = new City;
		$cityRecord->city = $city;
		$cityRecord->state_code = $state;
		if(!$cityRecord->save()) {
			throw new CHttpException(500, "Could not save the new city.");
		}
		// Add bank rate linked to the newly created city
		$bankRate = new BankRate;
		$bankRate->city_id = $cityRecord->id;
		$bankRate->rate = $rate;
		if(!$bankRate->save()) {
			throw new CHttpException(500, "Could not save the new bank rate.");
		}
		// Return the newly created record
		$this->sendJson(array('status' => 'OK', 'record' => $bankRate->attributes));
	}

	/**
	 * Update the bank rate for a city identified by name.
	 *
	 * @throws CHttpException if validation fails or the city is unknown
	 */
	public function actionUpdate() {
		$rate = intval($this->request->getParam('rate'));
		if(!$rate) {
			throw new CHttpException(500, "Sorry, You must provide the rate amount.");
		}
		$city = $this->requireCityParam();
		$row = $this->findRateByCity($city);
		// Update rate
		$row->rate = $rate;
		$row->update();
		// Return the updated record
		$this->sendJson(array('status' => 'OK', 'record' => $row->attributes));
	}

	/**
	 * Delete a city bank rate by city name.
	 *
	 * @throws CHttpException if the city param is missing or unknown
	 */
	public function actionDelete() {
		$city = $this->requireCityParam();
		$row = $this->findRateByCity($city);
		// Delete
		$row->delete();
		$this->sendJson(array('status' => 'OK'));
	}

	/**
	 * Default action is not supported.
	 */
	public function actionIndex() {
		die('Index is not supported.');
	}

	/**
	 * Read and validate the mandatory "city" request parameter.
	 *
	 * @return string the trimmed city name
	 * @throws CHttpException when the parameter is empty
	 */
	private function requireCityParam() {
		$city = trim($this->request->getParam('city'));
		if(!$city) {
			throw new CHttpException(500, "Sorry, You must provide the city name.");
		}
		return $city;
	}

	/**
	 * Find the BankRate record (with its City relation) for a city name.
	 *
	 * @param string $city the city name
	 * @return BankRate the matching record
	 * @throws CHttpException when no record matches
	 */
	private function findRateByCity($city) {
		$row = BankRate::model()->with(array('city'))->find('city.city=:name', array(':name' => $city));
		if(!$row) {
			throw new CHttpException(500, "Sorry, We could not find a record with that name.");
		}
		return $row;
	}

	/**
	 * Echo a JSON payload and terminate the request (mirrors the original
	 * echo + exit pattern used by every action).
	 *
	 * @param array $payload data to serialize
	 */
	private function sendJson($payload) {
		echo json_encode($payload);
		exit;
	}
}
|
VinceG/consumertrackexercise
|
protected/controllers/RateController.php
|
PHP
|
apache-2.0
| 2,747
|
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/awstransfer/Transfer_EXPORTS.h>
#include <aws/awstransfer/TransferRequest.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/awstransfer/model/HomeDirectoryType.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/awstransfer/model/PosixProfile.h>
#include <aws/awstransfer/model/HomeDirectoryMapEntry.h>
#include <utility>
namespace Aws
{
namespace Transfer
{
namespace Model
{
/**
*/
class AWS_TRANSFER_API UpdateUserRequest : public TransferRequest
{
public:
    UpdateUserRequest();

    // The service request name is the operation name used when sending this
    // request. Each operation has a unique request name, so the operation can
    // be identified from its request. Note: the same is not true for
    // responses — multiple operations may share a response name, so an
    // operation cannot be identified from its response.
    inline virtual const char* GetServiceRequestName() const override { return "UpdateUser"; }

    Aws::String SerializePayload() const override;

    Aws::Http::HeaderValueCollection GetRequestSpecificHeaders() const override;
    /**
     * <p>Specifies the landing directory (folder) for a user when they log in to the
     * server using their file transfer protocol client.</p> <p>An example is
     * <code>your-Amazon-S3-bucket-name/home/username</code>.</p>
     */
    inline const Aws::String& GetHomeDirectory() const{ return m_homeDirectory; }
    /** True if HomeDirectory has been explicitly set on this request. */
    inline bool HomeDirectoryHasBeenSet() const { return m_homeDirectoryHasBeenSet; }
    /** Sets the landing directory; see GetHomeDirectory(). */
    inline void SetHomeDirectory(const Aws::String& value) { m_homeDirectoryHasBeenSet = true; m_homeDirectory = value; }
    /** Sets the landing directory (move overload); see GetHomeDirectory(). */
    inline void SetHomeDirectory(Aws::String&& value) { m_homeDirectoryHasBeenSet = true; m_homeDirectory = std::move(value); }
    /** Sets the landing directory from a C string; see GetHomeDirectory(). */
    inline void SetHomeDirectory(const char* value) { m_homeDirectoryHasBeenSet = true; m_homeDirectory.assign(value); }
    /** Fluent setter for HomeDirectory; see GetHomeDirectory(). */
    inline UpdateUserRequest& WithHomeDirectory(const Aws::String& value) { SetHomeDirectory(value); return *this;}
    /** Fluent setter for HomeDirectory (move overload); see GetHomeDirectory(). */
    inline UpdateUserRequest& WithHomeDirectory(Aws::String&& value) { SetHomeDirectory(std::move(value)); return *this;}
    /** Fluent setter for HomeDirectory from a C string; see GetHomeDirectory(). */
    inline UpdateUserRequest& WithHomeDirectory(const char* value) { SetHomeDirectory(value); return *this;}
    /**
     * <p>The type of landing directory (folder) you want your users' home directory to
     * be when they log into the server. If you set it to <code>PATH</code>, the user
     * will see the absolute Amazon S3 bucket paths as is in their file transfer
     * protocol clients. If you set it <code>LOGICAL</code>, you will need to provide
     * mappings in the <code>HomeDirectoryMappings</code> for how you want to make
     * Amazon S3 paths visible to your users.</p>
     */
    inline const HomeDirectoryType& GetHomeDirectoryType() const{ return m_homeDirectoryType; }
    /** True if HomeDirectoryType has been explicitly set on this request. */
    inline bool HomeDirectoryTypeHasBeenSet() const { return m_homeDirectoryTypeHasBeenSet; }
    /** Sets the home directory type; see GetHomeDirectoryType(). */
    inline void SetHomeDirectoryType(const HomeDirectoryType& value) { m_homeDirectoryTypeHasBeenSet = true; m_homeDirectoryType = value; }
    /** Sets the home directory type (move overload); see GetHomeDirectoryType(). */
    inline void SetHomeDirectoryType(HomeDirectoryType&& value) { m_homeDirectoryTypeHasBeenSet = true; m_homeDirectoryType = std::move(value); }
    /** Fluent setter for HomeDirectoryType; see GetHomeDirectoryType(). */
    inline UpdateUserRequest& WithHomeDirectoryType(const HomeDirectoryType& value) { SetHomeDirectoryType(value); return *this;}
    /** Fluent setter for HomeDirectoryType (move overload); see GetHomeDirectoryType(). */
    inline UpdateUserRequest& WithHomeDirectoryType(HomeDirectoryType&& value) { SetHomeDirectoryType(std::move(value)); return *this;}
    /**
     * <p>Logical directory mappings that specify what Amazon S3 paths and keys should
     * be visible to your user and how you want to make them visible. You will need to
     * specify the "<code>Entry</code>" and "<code>Target</code>" pair, where
     * <code>Entry</code> shows how the path is made visible and <code>Target</code> is
     * the actual Amazon S3 path. If you only specify a target, it will be displayed as
     * is. You will need to also make sure that your IAM role provides access to paths
     * in <code>Target</code>. The following is an example.</p> <p> <code>'[
     * "/bucket2/documentation", { "Entry": "your-personal-report.pdf", "Target":
     * "/bucket3/customized-reports/${transfer:UserName}.pdf" } ]'</code> </p> <p>In
     * most cases, you can use this value instead of the scope-down policy to lock your
     * user down to the designated home directory ("chroot"). To do this, you can set
     * <code>Entry</code> to '/' and set <code>Target</code> to the HomeDirectory
     * parameter value.</p> <p>If the target of a logical directory entry does
     * not exist in Amazon S3, the entry will be ignored. As a workaround, you can use
     * the Amazon S3 API to create 0 byte objects as place holders for your directory.
     * If using the CLI, use the <code>s3api</code> call instead of <code>s3</code> so
     * you can use the put-object operation. For example, you use the following:
     * <code>aws s3api put-object --bucket bucketname --key path/to/folder/</code>.
     * Make sure that the end of the key name ends in a / for it to be considered a
     * folder.</p>
     */
    inline const Aws::Vector<HomeDirectoryMapEntry>& GetHomeDirectoryMappings() const{ return m_homeDirectoryMappings; }
    /** True if HomeDirectoryMappings has been explicitly set on this request. */
    inline bool HomeDirectoryMappingsHasBeenSet() const { return m_homeDirectoryMappingsHasBeenSet; }
    /** Replaces the mappings; see GetHomeDirectoryMappings(). */
    inline void SetHomeDirectoryMappings(const Aws::Vector<HomeDirectoryMapEntry>& value) { m_homeDirectoryMappingsHasBeenSet = true; m_homeDirectoryMappings = value; }
    /** Replaces the mappings (move overload); see GetHomeDirectoryMappings(). */
    inline void SetHomeDirectoryMappings(Aws::Vector<HomeDirectoryMapEntry>&& value) { m_homeDirectoryMappingsHasBeenSet = true; m_homeDirectoryMappings = std::move(value); }
    /** Fluent setter for HomeDirectoryMappings; see GetHomeDirectoryMappings(). */
    inline UpdateUserRequest& WithHomeDirectoryMappings(const Aws::Vector<HomeDirectoryMapEntry>& value) { SetHomeDirectoryMappings(value); return *this;}
    /** Fluent setter for HomeDirectoryMappings (move overload); see GetHomeDirectoryMappings(). */
    inline UpdateUserRequest& WithHomeDirectoryMappings(Aws::Vector<HomeDirectoryMapEntry>&& value) { SetHomeDirectoryMappings(std::move(value)); return *this;}
    /** Appends one mapping entry; see GetHomeDirectoryMappings(). */
    inline UpdateUserRequest& AddHomeDirectoryMappings(const HomeDirectoryMapEntry& value) { m_homeDirectoryMappingsHasBeenSet = true; m_homeDirectoryMappings.push_back(value); return *this; }
    /** Appends one mapping entry (move overload); see GetHomeDirectoryMappings(). */
    inline UpdateUserRequest& AddHomeDirectoryMappings(HomeDirectoryMapEntry&& value) { m_homeDirectoryMappingsHasBeenSet = true; m_homeDirectoryMappings.push_back(std::move(value)); return *this; }
    /**
     * <p>Allows you to supply a scope-down policy for your user so you can use the
     * same IAM role across multiple users. The policy scopes down user access to
     * portions of your Amazon S3 bucket. Variables you can use inside this policy
     * include <code>${Transfer:UserName}</code>,
     * <code>${Transfer:HomeDirectory}</code>, and
     * <code>${Transfer:HomeBucket}</code>.</p> <p>For scope-down policies, AWS
     * Transfer Family stores the policy as a JSON blob, instead of the Amazon Resource
     * Name (ARN) of the policy. You save the policy as a JSON blob and pass it in the
     * <code>Policy</code> argument.</p> <p>For an example of a scope-down policy, see
     * <a
     * href="https://docs.aws.amazon.com/transfer/latest/userguide/users.html#users-policies-scope-down">Creating
     * a scope-down policy</a>.</p> <p>For more information, see <a
     * href="https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html">AssumeRole</a>
     * in the <i>AWS Security Token Service API Reference</i>.</p>
     */
    inline const Aws::String& GetPolicy() const{ return m_policy; }
    /** True if Policy has been explicitly set on this request. */
    inline bool PolicyHasBeenSet() const { return m_policyHasBeenSet; }
    /** Sets the scope-down policy JSON blob; see GetPolicy(). */
    inline void SetPolicy(const Aws::String& value) { m_policyHasBeenSet = true; m_policy = value; }
    /** Sets the scope-down policy (move overload); see GetPolicy(). */
    inline void SetPolicy(Aws::String&& value) { m_policyHasBeenSet = true; m_policy = std::move(value); }
    /** Sets the scope-down policy from a C string; see GetPolicy(). */
    inline void SetPolicy(const char* value) { m_policyHasBeenSet = true; m_policy.assign(value); }
    /** Fluent setter for Policy; see GetPolicy(). */
    inline UpdateUserRequest& WithPolicy(const Aws::String& value) { SetPolicy(value); return *this;}
    /** Fluent setter for Policy (move overload); see GetPolicy(). */
    inline UpdateUserRequest& WithPolicy(Aws::String&& value) { SetPolicy(std::move(value)); return *this;}
    /** Fluent setter for Policy from a C string; see GetPolicy(). */
    inline UpdateUserRequest& WithPolicy(const char* value) { SetPolicy(value); return *this;}
    /**
     * The POSIX profile to apply for this user.
     * NOTE(review): unlike the sibling properties, this accessor group was
     * generated without documentation — the description above is inferred from
     * the PosixProfile model type; confirm against the Transfer Family
     * UpdateUser API reference.
     */
    inline const PosixProfile& GetPosixProfile() const{ return m_posixProfile; }
    /** True if PosixProfile has been explicitly set on this request. */
    inline bool PosixProfileHasBeenSet() const { return m_posixProfileHasBeenSet; }
    /** Sets the POSIX profile; see GetPosixProfile(). */
    inline void SetPosixProfile(const PosixProfile& value) { m_posixProfileHasBeenSet = true; m_posixProfile = value; }
    /** Sets the POSIX profile (move overload); see GetPosixProfile(). */
    inline void SetPosixProfile(PosixProfile&& value) { m_posixProfileHasBeenSet = true; m_posixProfile = std::move(value); }
    /** Fluent setter for PosixProfile; see GetPosixProfile(). */
    inline UpdateUserRequest& WithPosixProfile(const PosixProfile& value) { SetPosixProfile(value); return *this;}
    /** Fluent setter for PosixProfile (move overload); see GetPosixProfile(). */
    inline UpdateUserRequest& WithPosixProfile(PosixProfile&& value) { SetPosixProfile(std::move(value)); return *this;}
    /**
     * <p>The IAM role that controls your users' access to your Amazon S3 bucket. The
     * policies attached to this role will determine the level of access you want to
     * provide your users when transferring files into and out of your Amazon S3 bucket
     * or buckets. The IAM role should also contain a trust relationship that allows
     * the server to access your resources when servicing your users' transfer
     * requests.</p>
     */
    inline const Aws::String& GetRole() const{ return m_role; }
    /** True if Role has been explicitly set on this request. */
    inline bool RoleHasBeenSet() const { return m_roleHasBeenSet; }
    /** Sets the IAM role; see GetRole(). */
    inline void SetRole(const Aws::String& value) { m_roleHasBeenSet = true; m_role = value; }
    /** Sets the IAM role (move overload); see GetRole(). */
    inline void SetRole(Aws::String&& value) { m_roleHasBeenSet = true; m_role = std::move(value); }
    /** Sets the IAM role from a C string; see GetRole(). */
    inline void SetRole(const char* value) { m_roleHasBeenSet = true; m_role.assign(value); }
    /** Fluent setter for Role; see GetRole(). */
    inline UpdateUserRequest& WithRole(const Aws::String& value) { SetRole(value); return *this;}
    /** Fluent setter for Role (move overload); see GetRole(). */
    inline UpdateUserRequest& WithRole(Aws::String&& value) { SetRole(std::move(value)); return *this;}
    /** Fluent setter for Role from a C string; see GetRole(). */
    inline UpdateUserRequest& WithRole(const char* value) { SetRole(value); return *this;}
/**
* <p>A system-assigned unique identifier for a server instance that the user
* account is assigned to.</p>
*/
inline const Aws::String& GetServerId() const{ return m_serverId; }
/**
* <p>A system-assigned unique identifier for a server instance that the user
* account is assigned to.</p>
*/
inline bool ServerIdHasBeenSet() const { return m_serverIdHasBeenSet; }
/**
* <p>A system-assigned unique identifier for a server instance that the user
* account is assigned to.</p>
*/
inline void SetServerId(const Aws::String& value) { m_serverIdHasBeenSet = true; m_serverId = value; }
/**
* <p>A system-assigned unique identifier for a server instance that the user
* account is assigned to.</p>
*/
inline void SetServerId(Aws::String&& value) { m_serverIdHasBeenSet = true; m_serverId = std::move(value); }
/**
* <p>A system-assigned unique identifier for a server instance that the user
* account is assigned to.</p>
*/
inline void SetServerId(const char* value) { m_serverIdHasBeenSet = true; m_serverId.assign(value); }
/**
* <p>A system-assigned unique identifier for a server instance that the user
* account is assigned to.</p>
*/
inline UpdateUserRequest& WithServerId(const Aws::String& value) { SetServerId(value); return *this;}
/**
* <p>A system-assigned unique identifier for a server instance that the user
* account is assigned to.</p>
*/
inline UpdateUserRequest& WithServerId(Aws::String&& value) { SetServerId(std::move(value)); return *this;}
/**
* <p>A system-assigned unique identifier for a server instance that the user
* account is assigned to.</p>
*/
inline UpdateUserRequest& WithServerId(const char* value) { SetServerId(value); return *this;}
/**
* <p>A unique string that identifies a user and is associated with a server as
* specified by the <code>ServerId</code>. This user name must be a minimum of 3
* and a maximum of 100 characters long. The following are valid characters: a-z,
* A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user name
* can't start with a hyphen, period, or at sign.</p>
*/
inline const Aws::String& GetUserName() const{ return m_userName; }
/**
* <p>A unique string that identifies a user and is associated with a server as
* specified by the <code>ServerId</code>. This user name must be a minimum of 3
* and a maximum of 100 characters long. The following are valid characters: a-z,
* A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user name
* can't start with a hyphen, period, or at sign.</p>
*/
inline bool UserNameHasBeenSet() const { return m_userNameHasBeenSet; }
/**
* <p>A unique string that identifies a user and is associated with a server as
* specified by the <code>ServerId</code>. This user name must be a minimum of 3
* and a maximum of 100 characters long. The following are valid characters: a-z,
* A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user name
* can't start with a hyphen, period, or at sign.</p>
*/
inline void SetUserName(const Aws::String& value) { m_userNameHasBeenSet = true; m_userName = value; }
/**
* <p>A unique string that identifies a user and is associated with a server as
* specified by the <code>ServerId</code>. This user name must be a minimum of 3
* and a maximum of 100 characters long. The following are valid characters: a-z,
* A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user name
* can't start with a hyphen, period, or at sign.</p>
*/
inline void SetUserName(Aws::String&& value) { m_userNameHasBeenSet = true; m_userName = std::move(value); }
/**
* <p>A unique string that identifies a user and is associated with a server as
* specified by the <code>ServerId</code>. This user name must be a minimum of 3
* and a maximum of 100 characters long. The following are valid characters: a-z,
* A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user name
* can't start with a hyphen, period, or at sign.</p>
*/
inline void SetUserName(const char* value) { m_userNameHasBeenSet = true; m_userName.assign(value); }
/**
* <p>A unique string that identifies a user and is associated with a server as
* specified by the <code>ServerId</code>. This user name must be a minimum of 3
* and a maximum of 100 characters long. The following are valid characters: a-z,
* A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user name
* can't start with a hyphen, period, or at sign.</p>
*/
inline UpdateUserRequest& WithUserName(const Aws::String& value) { SetUserName(value); return *this;}
/**
* <p>A unique string that identifies a user and is associated with a server as
* specified by the <code>ServerId</code>. This user name must be a minimum of 3
* and a maximum of 100 characters long. The following are valid characters: a-z,
* A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user name
* can't start with a hyphen, period, or at sign.</p>
*/
inline UpdateUserRequest& WithUserName(Aws::String&& value) { SetUserName(std::move(value)); return *this;}
/**
* <p>A unique string that identifies a user and is associated with a server as
* specified by the <code>ServerId</code>. This user name must be a minimum of 3
* and a maximum of 100 characters long. The following are valid characters: a-z,
* A-Z, 0-9, underscore '_', hyphen '-', period '.', and at sign '@'. The user name
* can't start with a hyphen, period, or at sign.</p>
*/
inline UpdateUserRequest& WithUserName(const char* value) { SetUserName(value); return *this;}
private:
Aws::String m_homeDirectory;
bool m_homeDirectoryHasBeenSet;
HomeDirectoryType m_homeDirectoryType;
bool m_homeDirectoryTypeHasBeenSet;
Aws::Vector<HomeDirectoryMapEntry> m_homeDirectoryMappings;
bool m_homeDirectoryMappingsHasBeenSet;
Aws::String m_policy;
bool m_policyHasBeenSet;
PosixProfile m_posixProfile;
bool m_posixProfileHasBeenSet;
Aws::String m_role;
bool m_roleHasBeenSet;
Aws::String m_serverId;
bool m_serverIdHasBeenSet;
Aws::String m_userName;
bool m_userNameHasBeenSet;
};
} // namespace Model
} // namespace Transfer
} // namespace Aws
|
jt70471/aws-sdk-cpp
|
aws-cpp-sdk-awstransfer/include/aws/awstransfer/model/UpdateUserRequest.h
|
C
|
apache-2.0
| 43,302
|
-- Load the JSON records piped in on stdin into a scratch table.
create temp table jsoninp as select * from stdinput();
-- Rebuild the `grants` lookup table from scratch on every run.
drop table if exists grants;
-- For each JSON record, jsonpath() extracts id, label, suggestedAcknowledgement
-- and jsonextrainfo (c1..c4); the inner setschema names them, the middle select
-- keeps only the first three, and the outer setschema fixes the final column names.
create table grants as select conceptId,conceptLabel,suggestedAcknowledgement
from
(setschema 'conceptId,conceptLabel,suggestedAcknowledgement'
select c1 as conceptId,c2 as conceptLabel, c3 as suggestedAcknowledgement
from
(select * from (setschema 'c1,c2,c3,c4' select jsonpath(c1, '$.id','$.label','$.suggestedAcknowledgement','$.jsonextrainfo') from jsoninp)));
-- Index over all three columns so downstream lookups avoid a full scan.
create index grants_index on grants (conceptId,conceptLabel,suggestedAcknowledgement);
|
openaire/iis
|
iis-wf/iis-wf-referenceextraction/src/main/resources/eu/dnetlib/iis/wf/referenceextraction/community/sqlite_builder/oozie_app/lib/scripts/buildcummunitiesdb.sql
|
SQL
|
apache-2.0
| 600
|
#
# Copyright 2014 Telefonica Investigacion y Desarrollo, S.A.U
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for SPASSWORD checker."""
from keystone import tests
from keystone import exception
import keystone_spassword.contrib.spassword.checker
class TestPasswordChecker(tests.BaseTestCase):
    """Unit tests for the SPASSWORD strong-password checker."""

    def test_checker(self):
        """A password that fails the strength check raises ValidationError."""
        new_password = "stronger"
        # assertRaises must receive the callable and its arguments separately;
        # the original invoked the function eagerly, so the exception escaped
        # before assertRaises could trap it. Also, the module was imported as
        # 'keystone_spassword.contrib.spassword.checker', so the bare name
        # 'checker' was never bound (NameError) — use the qualified path that
        # the existing import actually provides.
        self.assertRaises(
            exception.ValidationError,
            keystone_spassword.contrib.spassword.checker.strong_check_password,
            new_password)
|
telefonicaid/fiware-keystone-spassword
|
keystone_spassword/tests/unit/contrib/spassword/test_checker.py
|
Python
|
apache-2.0
| 1,246
|
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/quicksight/model/SheetControlsOption.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace QuickSight
{
namespace Model
{
// Default constructor: visibility state starts at the NOT_SET sentinel and is
// flagged as not yet populated, so Jsonize() emits nothing for it.
SheetControlsOption::SheetControlsOption() : 
    m_visibilityState(DashboardUIState::NOT_SET),
    m_visibilityStateHasBeenSet(false)
{
}
// JSON constructor: initialize members to their defaults, then delegate the
// actual parsing to operator=.
SheetControlsOption::SheetControlsOption(JsonView jsonValue) : 
    m_visibilityState(DashboardUIState::NOT_SET),
    m_visibilityStateHasBeenSet(false)
{
  *this = jsonValue;
}
// Populate this object from a JSON document. Only fields present in the
// document are assigned, so absent fields keep their defaults and their
// "HasBeenSet" flags stay false.
SheetControlsOption& SheetControlsOption::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("VisibilityState"))
  {
    m_visibilityState = DashboardUIStateMapper::GetDashboardUIStateForName(jsonValue.GetString("VisibilityState"));
    m_visibilityStateHasBeenSet = true;
  }
  return *this;
}
// Serialize to JSON. Fields that were never set are omitted entirely rather
// than serialized with default values.
JsonValue SheetControlsOption::Jsonize() const
{
  JsonValue payload;
  if(m_visibilityStateHasBeenSet)
  {
    payload.WithString("VisibilityState", DashboardUIStateMapper::GetNameForDashboardUIState(m_visibilityState));
  }
  return payload;
}
} // namespace Model
} // namespace QuickSight
} // namespace Aws
|
cedral/aws-sdk-cpp
|
aws-cpp-sdk-quicksight/source/model/SheetControlsOption.cpp
|
C++
|
apache-2.0
| 1,760
|
<?php
/**
 * Footer widget-area template.
 *
 * Mobile visitors get the 'Mobile' widget area; everyone else gets 'Footer'.
 * The bar renders only when the 'show_footbar' theme option is enabled and
 * the chosen widget area actually contains widgets.
 */
if (wp_is_mobile())
    $footer = 'Mobile';
else
    $footer = 'Footer';
// Fix: is_dynamic_sidebar() takes no arguments (the $footer passed here was
// silently ignored) and only reports whether the theme registers any widget
// area at all; is_active_sidebar($index) is the correct per-sidebar check.
if (get_theme_mod('show_footbar', true) && is_active_sidebar($footer)) {
?>
<div id="footbar" class="footbar">
    <ul>
        <?php
        dynamic_sidebar($footer);
        ?>
    </ul>
</div>
<?php } ?>
|
parmaja/wp_metallic
|
sidebar-footer.php
|
PHP
|
apache-2.0
| 289
|
package org.f0w.k2i.core.event;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class ImportStartedEventTest {

    /** Verifies that the constructor stores the supplied list size verbatim. */
    @Test
    public void constructorInitialization() {
        final ImportStartedEvent importStarted = new ImportStartedEvent(5);
        assertEquals(5, importStarted.listSize);
    }
}
|
REDNBLACK/J-Kinopoisk2IMDB
|
core/src/test/java/org/f0w/k2i/core/event/ImportStartedEventTest.java
|
Java
|
apache-2.0
| 308
|
/******************************************************
Copyright (c) 2015, IBM
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*********************************************************/
package com.ibm.si.qradar.offenseviz.api;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ibm.si.qradar.offenseviz.conf.QRadarConfig;
import com.ibm.si.qradar.offenseviz.geoip.GeoInfo;
import com.ibm.si.qradar.offenseviz.geoip.GeoipUtil;
/**
 * REST resource that resolves an IP address to geographic information.
 *
 * <p>GET /iplookup?ip=a.b.c.d returns a JSON array produced by
 * {@code GeoInfo.asArray()}; when the lookup yields nothing (or fails) the
 * configured default coordinates are returned instead.
 */
@Path("/iplookup")
public class IPLookupResource {

    private static final Logger logger = LoggerFactory.getLogger(IPLookupResource.class);

    // Fallback location used when the GeoIP lookup has no answer for the IP.
    private static final Double defaultLatitude = QRadarConfig.getInstance().getDefaultLatitude();
    private static final Double defaultLongitude = QRadarConfig.getInstance().getDefaultLongitude();
    private static final String defaultCountry = QRadarConfig.getInstance().getDefaultCountry();
    private static final String defaultCity = QRadarConfig.getInstance().getDefaultCity();

    /**
     * Looks up geo information for the given IP address.
     *
     * @param ipAddress the IP to resolve; when absent, an empty 200 response
     *                  is returned (original contract preserved)
     * @return 200 response whose entity is the geo-info array, or an empty
     *         200 when no {@code ip} query parameter was supplied
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response lookupIpAddress(@QueryParam("ip") String ipAddress) {
        if (ipAddress == null) {
            return Response.ok().build();
        }
        GeoInfo ginfo = null;
        GeoipUtil geoipUtil = null;
        try {
            geoipUtil = new GeoipUtil();
            ginfo = geoipUtil.getGeoInfo(ipAddress);
        } catch (Exception e) {
            logger.debug("Couldn't look up IP", e);
        } finally {
            // Bug fix: the original unconditionally called geoipUtil.dispose()
            // here, which throws NPE when the GeoipUtil constructor itself failed.
            if (geoipUtil != null) {
                geoipUtil.dispose();
            }
        }
        if (ginfo == null) {
            // Bug fix: the original only substituted defaults when getGeoInfo()
            // returned null inside the try block; when it threw, ginfo stayed
            // null and ginfo.asArray() below raised an NPE.
            ginfo = new GeoInfo(defaultLatitude, defaultLongitude, defaultCountry, defaultCity);
        }
        return Response.ok().entity(ginfo.asArray()).build();
    }
}
|
ibm-security-intelligence/visualizations
|
incident_overview/src/main/java/com/ibm/si/qradar/offenseviz/api/IPLookupResource.java
|
Java
|
apache-2.0
| 2,370
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
import mock
from airflow.providers.google.cloud.operators.dataflow import (
CheckJobRunning, DataflowCreateJavaJobOperator, DataflowCreatePythonJobOperator,
DataflowTemplatedJobStartOperator,
)
from airflow.version import version
# Fixed identifiers and option dictionaries shared by all test cases below.
TASK_ID = 'test-dataflow-operator'
JOB_NAME = 'test-dataflow-pipeline'
TEMPLATE = 'gs://dataflow-templates/wordcount/template_file'
PARAMETERS = {
    'inputFile': 'gs://dataflow-samples/shakespeare/kinglear.txt',
    'output': 'gs://test/output/my_output'
}
PY_FILE = 'gs://my-bucket/my-object.py'
PY_INTERPRETER = 'python3'
JAR_FILE = 'gs://my-bucket/example/test.jar'
JOB_CLASS = 'com.test.NotMain'
PY_OPTIONS = ['-m']
DEFAULT_OPTIONS_PYTHON = DEFAULT_OPTIONS_JAVA = {
    'project': 'test',
    'stagingLocation': 'gs://test/staging',
}
DEFAULT_OPTIONS_TEMPLATE = {
    'project': 'test',
    'stagingLocation': 'gs://test/staging',
    'tempLocation': 'gs://test/temp',
    'zone': 'us-central1-f'
}
ADDITIONAL_OPTIONS = {
    'output': 'gs://test/output',
    'labels': {'foo': 'bar'}
}
# The expected labels include an 'airflow-version' tag derived from the running
# Airflow version; '.' and '+' are replaced since they are rewritten by the
# operator before labeling jobs.
TEST_VERSION = 'v{}'.format(version.replace('.', '-').replace('+', '-'))
EXPECTED_ADDITIONAL_OPTIONS = {
    'output': 'gs://test/output',
    'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}
}
POLL_SLEEP = 30
GCS_HOOK_STRING = 'airflow.providers.google.cloud.operators.dataflow.{}'
TEST_LOCATION = "custom-location"
class TestDataflowPythonOperator(unittest.TestCase):
    # Tests for DataflowCreatePythonJobOperator: constructor attribute wiring
    # and the hook calls made by execute() (hooks are fully mocked).

    def setUp(self):
        self.dataflow = DataflowCreatePythonJobOperator(
            task_id=TASK_ID,
            py_file=PY_FILE,
            job_name=JOB_NAME,
            py_options=PY_OPTIONS,
            dataflow_default_options=DEFAULT_OPTIONS_PYTHON,
            options=ADDITIONAL_OPTIONS,
            poll_sleep=POLL_SLEEP,
            location=TEST_LOCATION
        )

    def test_init(self):
        """Test DataFlowPythonOperator instance is properly initialized."""
        self.assertEqual(self.dataflow.task_id, TASK_ID)
        self.assertEqual(self.dataflow.job_name, JOB_NAME)
        self.assertEqual(self.dataflow.py_file, PY_FILE)
        self.assertEqual(self.dataflow.py_options, PY_OPTIONS)
        self.assertEqual(self.dataflow.py_interpreter, PY_INTERPRETER)
        self.assertEqual(self.dataflow.poll_sleep, POLL_SLEEP)
        self.assertEqual(self.dataflow.dataflow_default_options,
                         DEFAULT_OPTIONS_PYTHON)
        # EXPECTED_ADDITIONAL_OPTIONS (not ADDITIONAL_OPTIONS): the operator
        # adds the 'airflow-version' label to the options it was given.
        self.assertEqual(self.dataflow.options,
                         EXPECTED_ADDITIONAL_OPTIONS)

    @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
    @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
    def test_exec(self, gcs_hook, dataflow_mock):
        """Test DataflowHook is created and the right args are passed to
        start_python_workflow.
        """
        start_python_hook = dataflow_mock.return_value.start_python_dataflow
        gcs_provide_file = gcs_hook.return_value.provide_file
        self.dataflow.execute(None)
        self.assertTrue(dataflow_mock.called)
        # Note the key change: 'stagingLocation' (input) appears here as
        # 'staging_location' — camelCase option keys reach the hook snake_cased.
        expected_options = {
            'project': 'test',
            'staging_location': 'gs://test/staging',
            'output': 'gs://test/output',
            'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}
        }
        gcs_provide_file.assert_called_once_with(object_url=PY_FILE)
        start_python_hook.assert_called_once_with(
            job_name=JOB_NAME,
            variables=expected_options,
            dataflow=mock.ANY,
            py_options=PY_OPTIONS,
            py_interpreter=PY_INTERPRETER,
            py_requirements=[],
            py_system_site_packages=False,
            on_new_job_id_callback=mock.ANY,
            project_id=None,
            location=TEST_LOCATION
        )
        # The GCS object is staged to a local temp path before submission.
        self.assertTrue(self.dataflow.py_file.startswith('/tmp/dataflow'))
class TestDataflowJavaOperator(unittest.TestCase):
    # Tests for DataflowCreateJavaJobOperator, covering the check_if_running
    # modes: IgnoreJob, already-running (skip submit), not-running (submit),
    # and multiple_jobs handling. All hooks are mocked.

    def setUp(self):
        self.dataflow = DataflowCreateJavaJobOperator(
            task_id=TASK_ID,
            jar=JAR_FILE,
            job_name=JOB_NAME,
            job_class=JOB_CLASS,
            dataflow_default_options=DEFAULT_OPTIONS_JAVA,
            options=ADDITIONAL_OPTIONS,
            poll_sleep=POLL_SLEEP,
            location=TEST_LOCATION
        )

    def test_init(self):
        """Test DataflowTemplateOperator instance is properly initialized."""
        self.assertEqual(self.dataflow.task_id, TASK_ID)
        self.assertEqual(self.dataflow.job_name, JOB_NAME)
        self.assertEqual(self.dataflow.poll_sleep, POLL_SLEEP)
        self.assertEqual(self.dataflow.dataflow_default_options,
                         DEFAULT_OPTIONS_JAVA)
        self.assertEqual(self.dataflow.job_class, JOB_CLASS)
        self.assertEqual(self.dataflow.jar, JAR_FILE)
        self.assertEqual(self.dataflow.options,
                         EXPECTED_ADDITIONAL_OPTIONS)
        # WaitForRun is the operator's default when check_if_running is unset.
        self.assertEqual(self.dataflow.check_if_running, CheckJobRunning.WaitForRun)

    @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
    @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
    def test_exec(self, gcs_hook, dataflow_mock):
        """Test DataflowHook is created and the right args are passed to
        start_java_workflow.
        """
        start_java_hook = dataflow_mock.return_value.start_java_dataflow
        gcs_provide_file = gcs_hook.return_value.provide_file
        # IgnoreJob: submit unconditionally, without a running-job check.
        self.dataflow.check_if_running = CheckJobRunning.IgnoreJob
        self.dataflow.execute(None)
        self.assertTrue(dataflow_mock.called)
        gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
        start_java_hook.assert_called_once_with(
            job_name=JOB_NAME,
            variables=mock.ANY,
            jar=mock.ANY,
            job_class=JOB_CLASS,
            append_job_name=True,
            multiple_jobs=None,
            on_new_job_id_callback=mock.ANY,
            project_id=None,
            location=TEST_LOCATION
        )

    @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
    @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
    def test_check_job_running_exec(self, gcs_hook, dataflow_mock):
        """Test DataflowHook is created and the right args are passed to
        start_java_workflow.
        """
        # Job reported as already running: neither the jar staging nor the
        # submit call should happen.
        dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
        dataflow_running.return_value = True
        start_java_hook = dataflow_mock.return_value.start_java_dataflow
        gcs_provide_file = gcs_hook.return_value.provide_file
        self.dataflow.check_if_running = True
        self.dataflow.execute(None)
        self.assertTrue(dataflow_mock.called)
        gcs_provide_file.assert_not_called()
        start_java_hook.assert_not_called()
        dataflow_running.assert_called_once_with(
            name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION)

    @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
    @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
    def test_check_job_not_running_exec(self, gcs_hook, dataflow_mock):
        """Test DataflowHook is created and the right args are passed to
        start_java_workflow with option to check if job is running
        """
        # Job not running: the check happens AND the job is still submitted.
        dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
        dataflow_running.return_value = False
        start_java_hook = dataflow_mock.return_value.start_java_dataflow
        gcs_provide_file = gcs_hook.return_value.provide_file
        self.dataflow.check_if_running = True
        self.dataflow.execute(None)
        self.assertTrue(dataflow_mock.called)
        gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
        start_java_hook.assert_called_once_with(
            job_name=JOB_NAME,
            variables=mock.ANY,
            jar=mock.ANY,
            job_class=JOB_CLASS,
            append_job_name=True,
            multiple_jobs=None,
            on_new_job_id_callback=mock.ANY,
            project_id=None,
            location=TEST_LOCATION
        )
        dataflow_running.assert_called_once_with(
            name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION)

    @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
    @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook')
    def test_check_multiple_job_exec(self, gcs_hook, dataflow_mock):
        """Test DataflowHook is created and the right args are passed to
        start_java_workflow with option to check multiple jobs
        """
        # Same as above, but multiple_jobs=True must be forwarded to the hook.
        dataflow_running = dataflow_mock.return_value.is_job_dataflow_running
        dataflow_running.return_value = False
        start_java_hook = dataflow_mock.return_value.start_java_dataflow
        gcs_provide_file = gcs_hook.return_value.provide_file
        self.dataflow.multiple_jobs = True
        self.dataflow.check_if_running = True
        self.dataflow.execute(None)
        self.assertTrue(dataflow_mock.called)
        gcs_provide_file.assert_called_once_with(object_url=JAR_FILE)
        start_java_hook.assert_called_once_with(
            job_name=JOB_NAME,
            variables=mock.ANY,
            jar=mock.ANY,
            job_class=JOB_CLASS,
            append_job_name=True,
            multiple_jobs=True,
            on_new_job_id_callback=mock.ANY,
            project_id=None,
            location=TEST_LOCATION
        )
        dataflow_running.assert_called_once_with(
            name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION
        )
class TestDataflowTemplateOperator(unittest.TestCase):
    # Tests for DataflowTemplatedJobStartOperator: options and
    # dataflow_default_options must be merged before reaching the hook.

    def setUp(self):
        self.dataflow = DataflowTemplatedJobStartOperator(
            task_id=TASK_ID,
            template=TEMPLATE,
            job_name=JOB_NAME,
            parameters=PARAMETERS,
            options=DEFAULT_OPTIONS_TEMPLATE,
            dataflow_default_options={"EXTRA_OPTION": "TEST_A"},
            poll_sleep=POLL_SLEEP,
            location=TEST_LOCATION
        )

    @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook')
    def test_exec(self, dataflow_mock):
        """Test DataflowHook is created and the right args are passed to
        start_template_workflow.
        """
        start_template_hook = dataflow_mock.return_value.start_template_dataflow
        self.dataflow.execute(None)
        self.assertTrue(dataflow_mock.called)
        # Expected variables = options merged with dataflow_default_options
        # ('EXTRA_OPTION' comes from the defaults dict passed in setUp).
        expected_options = {
            'project': 'test',
            'stagingLocation': 'gs://test/staging',
            'tempLocation': 'gs://test/temp',
            'zone': 'us-central1-f',
            'EXTRA_OPTION': "TEST_A"
        }
        start_template_hook.assert_called_once_with(
            job_name=JOB_NAME,
            variables=expected_options,
            parameters=PARAMETERS,
            dataflow_template=TEMPLATE,
            on_new_job_id_callback=mock.ANY,
            project_id=None,
            location=TEST_LOCATION
        )
|
wooga/airflow
|
tests/providers/google/cloud/operators/test_dataflow.py
|
Python
|
apache-2.0
| 11,898
|
//
// ViewController.h
// Demo02_GitServer
//
// Created by tarena on 15/10/8.
// Copyright (c) 2015 tarena. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Primary view controller for the Demo02_GitServer sample; declares no
/// custom API beyond what UIViewController provides.
@interface ViewController : UIViewController

@end
|
lxpiosdeveloper/GtiTest
|
Demo02_GitServer/Demo02_GitServer/ViewController.h
|
C
|
apache-2.0
| 219
|
// Java2Script-transpiled int->int hash map (J.util.Int2IntHash) using
// separate chaining. NOTE: generated code — do not hand-edit logic; regenerate
// from the Java source instead.
Clazz.declarePackage ("J.util");
c$ = Clazz.decorateAsClass (function () {
this.entryCount = 0;
this.entries = null;
Clazz.instantialize (this, arguments);
}, J.util, "Int2IntHash");
// Constructor: allocate the bucket array at the requested capacity.
Clazz.makeConstructor (c$,
function (initialCapacity) {
this.entries = new Array (initialCapacity);
}, "~N");
// get(key): walk the chain for the key's bucket; returns -2147483648
// (Integer.MIN_VALUE) as the "not found" sentinel.
$_M(c$, "get",
function (key) {
var entries = this.entries;
// Mask to non-negative before modulo so negative keys hash correctly.
var hash = (key & 0x7FFFFFFF) % entries.length;
for (var e = entries[hash]; e != null; e = e.next) if (e.key == key) return e.value;
return -2147483648;
}, "~N");
// put(key, value): update in place when the key exists; otherwise prepend a
// new Entry to the bucket chain, growing the table first when entryCount
// exceeds the bucket count.
$_M(c$, "put",
function (key, value) {
var entries = this.entries;
var hash = (key & 0x7FFFFFFF) % entries.length;
for (var e = entries[hash]; e != null; e = e.next) if (e.key == key) {
e.value = value;
return;
}
if (this.entryCount > entries.length) this.rehash ();
entries = this.entries;
hash = (key & 0x7FFFFFFF) % entries.length;
entries[hash] = new J.util.Int2IntHash.Entry (key, value, entries[hash]);
++this.entryCount;
}, "~N,~N");
// rehash(): grow the bucket array to 2n+1 and redistribute every entry,
// relinking the existing Entry nodes rather than allocating new ones.
$_M(c$, "rehash",
($fz = function () {
var oldEntries = this.entries;
var oldSize = oldEntries.length;
var newSize = oldSize * 2 + 1;
var newEntries = new Array (newSize);
for (var i = oldSize; --i >= 0; ) {
for (var e = oldEntries[i]; e != null; ) {
var t = e;
e = e.next;
var hash = (t.key & 0x7FFFFFFF) % newSize;
t.next = newEntries[hash];
newEntries[hash] = t;
}
}
this.entries = newEntries;
}, $fz.isPrivate = true, $fz));
// Inner class Entry: a singly-linked chain node (key, value, next).
Clazz.pu$h ();
c$ = Clazz.decorateAsClass (function () {
this.key = 0;
this.value = 0;
this.next = null;
Clazz.instantialize (this, arguments);
}, J.util.Int2IntHash, "Entry");
Clazz.makeConstructor (c$,
function (a, b, c) {
this.key = a;
this.value = b;
this.next = c;
}, "~N,~N,J.util.Int2IntHash.Entry");
c$ = Clazz.p0p ();
|
DeepLit/WHG
|
root/static/js/jsmol/j2s/J/util/Int2IntHash.js
|
JavaScript
|
apache-2.0
| 1,785
|
import wasp
def onConflict():
"""
Optional.
A conflict happened during the solving
"""
pass
def onDeletion():
"""
Optional.
The method for deleting clauses is invoked.
"""
pass
def onLearningClause(lbd, size, *lits):
"""
Optional.
When a clause is learnt.
:param lbd: the lbd value of the learnt clause
:param size: the size of the learned clause
:param lits: the literals in the learned clause
"""
pass
def onLitInImportantClause(lit):
"""
Optional.
When a literal appears in special clauses, e.g. glue clauses.
:param lit: the literal in the important clause.
"""
pass
def onLitInvolvedInConflict(lit):
"""
Optional.
When a literal is involved in the computation of the learned clause.
:param lit: the literal involved in the conflict
"""
pass
def onLoopFormula(lbd, size, *lits):
"""
Optional.
When a loop formula is learnt for an unfounded set.
:param lbd: the lbd value of the loop formula
:param size: the size of the loop formula
:param lits: the literals in the loop formula
"""
pass
def onNewClause(*clause):
"""
Optional.
All clauses left after the simplifications are sent to the heuristic using this method
:param clause: the clause
"""
def onRestart():
"""
Optional.
When the solver performs a restart.
"""
pass
def onUnfoundedSet(*unfounded_set):
"""
Optional.
When an unfounded set is found.
:param unfounded_set: all atoms in the unfounded set
"""
pass
def initFallback():
"""
Optional.
Init the activities of variables in the fallback heuristic.
:return: List of pairs (v, i), the activity variable v is associated with i.
"""
pass
def factorFallback():
"""
Optional.
Set the factor for the activities of variables in the fallback heuristic (required fallback method).
:return: list of pairs (v, f), the factor f is associated to the variable v.
"""
pass
def signFallback():
"""
Optional.
Set the preferred polarity for variables in the fallback heuristic (required fallback method).
:return: list of literals
"""
pass
def selectLiteral():
    """
    Required.
    Invoked when a choice is needed. It can return a choice, or special
    values for performing special actions.
    Special values:
    - wasp.restart() forces the solver to perform a restart
    - wasp.fallback(n) uses the fallback heuristic for n steps (n<=0 uses the
      fallback heuristic forever) -> requires the presence of the method
      fallback() in the script
    - wasp.unroll(v) unrolls the truth value of the variable v
    :return: wasp.choice(l), where l is a literal
    """
    pass
|
alviano/wasp
|
python_libraries/heuristics/heuristic-instructions.py
|
Python
|
apache-2.0
| 2,804
|
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Topic 04 -- Abstracts with Biological Entities (English) - 75 Topics / Sub-Topic Model 01 - 15 Topics</title>
<style>
table {
font-family: "Trebuchet MS", Arial, Helvetica, sans-serif;
border-collapse: collapse;
width: 100%;
}
td, th {
border: 1px solid #ddd;
padding: 8px;
}
tr:nth-child(even){background-color: #f2f2f2;}
tr:hover {background-color: #ddd;}
th {
padding-top: 12px;
padding-bottom: 12px;
text-align: left;
background-color: #0099FF;
color: white;
}
</style>
</head>
<body>
<h2>Topic 04 -- Abstracts with Biological Entities (English) - 75 Topics / Sub-Topic Model 01 - 15 Topics</h2>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>cite ad</th>
<th>title</th>
<th>authors</th>
<th>publish year</th>
<th>publish time</th>
<th>dataset</th>
<th>abstract mentions covid</th>
<th>pmcid</th>
<th>pubmed id</th>
<th>doi</th>
<th>cord uid</th>
<th>topic weight</th>
<th>Similarity scispacy</th>
<th>Similarity specter</th>
</tr>
</thead>
<tbody>
<tr>
<th id="q6ioewct">1</th>
<td>Zhang_2013</td>
<td>Endogenous R&D spillover and location choice in a mixed oligopoly</td>
<td>Zhang, Jianhu; Li, Changying</td>
<td>2013</td>
<td>2013-02-14</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7080093" target="_blank">PMC7080093</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/s00168-013-0556-2" target="_blank">10.1007/s00168-013-0556-2</a></td>
<td>q6ioewct</td>
<td>0.962279</td>
<td><a href="Topic_05.html#8plec8t4">Looker_2006</a></td>
<td></td>
</tr>
<tr>
<th id="e7hkfbxu">2</th>
<td>Tuyon_2016</td>
<td>Behavioural finance perspectives on Malaysian stock market efficiency</td>
<td>Tuyon, Jasman; Ahmad, Zamri</td>
<td>2016</td>
<td>2016-03-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7148656" target="_blank">PMC7148656</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.bir.2016.01.001" target="_blank">10.1016/j.bir.2016.01.001</a></td>
<td>e7hkfbxu</td>
<td>0.785241</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="pm9kdqdw">3</th>
<td>Kreuder-Sonnen_2019</td>
<td>China vs the WHO: a behavioural norm conflict in the SARS crisis</td>
<td>Kreuder-Sonnen, Christian</td>
<td>2019</td>
<td>2019-05-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7108605" target="_blank">PMC7108605</a></td>
<td></td>
<td><a href="https://doi.org/10.1093/ia/iiz022" target="_blank">10.1093/ia/iiz022</a></td>
<td>pm9kdqdw</td>
<td>0.778644</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="jopxh7mn">4</th>
<td>Chan_2008</td>
<td>Private space, shared space and private housing prices in Hong Kong: An exploratory study</td>
<td>Chan, Edwin Hon-Wan; So, Hing-Mei; Tang, Bo-Sin; Wong, Wah-Sang</td>
<td>2008</td>
<td>2008-09-30</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7124225" target="_blank">PMC7124225</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.habitatint.2007.11.004" target="_blank">10.1016/j.habitatint.2007.11.004</a></td>
<td>jopxh7mn</td>
<td>0.776649</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="6s9pvy0y">5</th>
<td>Ho_T_2017</td>
<td>Hong Kong Paradox: Appearance and Disappearance in Western Cinema</td>
<td>Ho, Tammy Lai-Ming</td>
<td>2017</td>
<td>2017-12-27</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7120515" target="_blank">PMC7120515</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/978-981-10-7766-1_4" target="_blank">10.1007/978-981-10-7766-1_4</a></td>
<td>6s9pvy0y</td>
<td>0.748528</td>
<td></td>
<td><a href="Topic_06.html#a6k4s5em">Salazar_2005</a></td>
</tr>
<tr>
<th id="9opqbu7h">6</th>
<td>Leung_2004</td>
<td>What has luck got to do with economic development? An interpretation of resurgent Asia’s growth experience</td>
<td>Leung, H.M.; Tan, Swee Liang; Yang, Zhen Lin</td>
<td>2004</td>
<td>2004-04-30</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7134611" target="_blank">PMC7134611</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.jpolmod.2004.02.003" target="_blank">10.1016/j.jpolmod.2004.02.003</a></td>
<td>9opqbu7h</td>
<td>0.738815</td>
<td><a href="Topic_05.html#8plec8t4">Looker_2006</a></td>
<td></td>
</tr>
<tr>
<th id="xeaqfxqj">7</th>
<td>Ruddick_2009</td>
<td>Society–Space</td>
<td>Ruddick, S.M.</td>
<td>2009</td>
<td>2009-12-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7151806" target="_blank">PMC7151806</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/b978-008044910-4.00744-6" target="_blank">10.1016/b978-008044910-4.00744-6</a></td>
<td>xeaqfxqj</td>
<td>0.724640</td>
<td><a href="Topic_05.html#8plec8t4">Looker_2006</a></td>
<td><a href="Topic_09.html#vca203wo">Tufts_2009</a></td>
</tr>
<tr>
<th id="vbnnm9qg">8</th>
<td>Shih_Pearson_2017</td>
<td>Towards a Practical Cosmopolitanism</td>
<td>Shih Pearson, Justine</td>
<td>2017</td>
<td>2017-12-15</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7123891" target="_blank">PMC7123891</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/978-3-319-69572-3_1" target="_blank">10.1007/978-3-319-69572-3_1</a></td>
<td>vbnnm9qg</td>
<td>0.719119</td>
<td></td>
<td><a href="Topic_04.html#6s9pvy0y">Ho_T_2017</a>, <a href="Topic_04.html#xeaqfxqj">Ruddick_2009</a></td>
</tr>
<tr>
<th id="z9dy0ok2">9</th>
<td>Khoo_2017</td>
<td>Southeast Asia: Beyond Crises and Traps</td>
<td>Khoo, Boo Teik; Tsunekawa, Keiichi</td>
<td>2017</td>
<td>2017-08-26</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7124048" target="_blank">PMC7124048</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/978-3-319-55038-1_1" target="_blank">10.1007/978-3-319-55038-1_1</a></td>
<td>z9dy0ok2</td>
<td>0.679576</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="s8gknqee">10</th>
<td>Zhang_2011</td>
<td>Price wars and price collusion in China's airline markets</td>
<td>Zhang, Yahua; Round, David K.</td>
<td>2011</td>
<td>2011-07-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7114114" target="_blank">PMC7114114</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.ijindorg.2010.07.005" target="_blank">10.1016/j.ijindorg.2010.07.005</a></td>
<td>s8gknqee</td>
<td>0.678723</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="hp7p3eu4">11</th>
<td>Cooper_2015</td>
<td>Stretching health diplomacy beyond ‘Global’ problem solving: Bringing the regional normative dimension in</td>
<td>Cooper, Andrew F; Farooq, Asif B</td>
<td>2015</td>
<td>2015-12-23</td>
<td>COMM-USE</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4639834" target="_blank">PMC4639834</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/26635500.0" target="_blank">26635500.0</a></td>
<td><a href="https://doi.org/10.1177/1468018115599820" target="_blank">10.1177/1468018115599820</a></td>
<td>hp7p3eu4</td>
<td>0.662613</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="bg7133ye">12</th>
<td>Ouyang_2010</td>
<td>China as a reserve sink: The evidence from offset and sterilization coefficients</td>
<td>Ouyang, Alice Y.; Rajan, Ramkishen S.; Willett, Thomas D.</td>
<td>2010</td>
<td>2010-09-30</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7115424" target="_blank">PMC7115424</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.jimonfin.2009.12.006" target="_blank">10.1016/j.jimonfin.2009.12.006</a></td>
<td>bg7133ye</td>
<td>0.623330</td>
<td></td>
<td><a href="Topic_04.html#a478fdce">Lin_P_2012</a></td>
</tr>
<tr>
<th id="igf0gpu5">13</th>
<td>Lidén_2014</td>
<td>The World Health Organization and Global Health Governance: post-1990</td>
<td>Lidén, J.</td>
<td>2014</td>
<td>2014-02-28</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7118765" target="_blank">PMC7118765</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/24388640.0" target="_blank">24388640.0</a></td>
<td><a href="https://doi.org/10.1016/j.puhe.2013.08.008" target="_blank">10.1016/j.puhe.2013.08.008</a></td>
<td>igf0gpu5</td>
<td>0.613089</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="xedqhl2m">14</th>
<td>Lister_2012</td>
<td>The Process and Practice of Negotiation</td>
<td>Lister, Graham; Lee, Kelley</td>
<td>2012</td>
<td>2012-11-07</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7122328" target="_blank">PMC7122328</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/978-1-4614-5401-4_6" target="_blank">10.1007/978-1-4614-5401-4_6</a></td>
<td>xedqhl2m</td>
<td>0.591138</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="afagsm66">15</th>
<td>Bump_2018</td>
<td>Undernutrition, obesity and governance: a unified framework for upholding the right to food</td>
<td>Bump, Jesse B</td>
<td>2018</td>
<td>2018-10-10</td>
<td>NONCOMM</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6195135" target="_blank">PMC6195135</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/30364379.0" target="_blank">30364379.0</a></td>
<td><a href="https://doi.org/10.1136/bmjgh-2018-000886" target="_blank">10.1136/bmjgh-2018-000886</a></td>
<td>afagsm66</td>
<td>0.569279</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="f6sl2k3n">16</th>
<td>Grubesic_2009</td>
<td>Spatio-temporal fluctuations in the global airport hierarchies</td>
<td>Grubesic, Tony H.; Matisziw, Timothy C.; Zook, Matthew A.</td>
<td>2009</td>
<td>2009-07-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7126044" target="_blank">PMC7126044</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.jtrangeo.2009.02.003" target="_blank">10.1016/j.jtrangeo.2009.02.003</a></td>
<td>f6sl2k3n</td>
<td>0.560121</td>
<td><a href="Topic_01.html#myjbk7ye">Galea_2005</a></td>
<td></td>
</tr>
<tr>
<th id="dopm7wnv">17</th>
<td>Su_C_2020</td>
<td>Testing for multiple bubbles in the copper price: Periodically collapsing behavior</td>
<td>Su, Chi-Wei; Wang, Xiao-Qing; Zhu, Haotian; Tao, Ran; Moldovan, Nicoleta-Claudia; Lobonţ, Oana-Ramona</td>
<td>2020</td>
<td>2020-03-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7147841" target="_blank">PMC7147841</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.resourpol.2020.101587" target="_blank">10.1016/j.resourpol.2020.101587</a></td>
<td>dopm7wnv</td>
<td>0.545703</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="0enet6p7">18</th>
<td>Shan_2017</td>
<td>Strategies for risk management in urban–rural conflict: Two case studies of land acquisition in urbanising China</td>
<td>Shan, Liping; Yu, Ann T.W.; Wu, Yuzhe</td>
<td>2017</td>
<td>2017-01-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7124285" target="_blank">PMC7124285</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.habitatint.2016.11.009" target="_blank">10.1016/j.habitatint.2016.11.009</a></td>
<td>0enet6p7</td>
<td>0.543072</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="a478fdce">19</th>
<td>Lin_P_2012</td>
<td>Exchange rate pass-through in deflation: The case of Taiwan</td>
<td>Lin, Po-Chun; Wu, Chung-Shu</td>
<td>2012</td>
<td>2012-04-30</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7148987" target="_blank">PMC7148987</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.iref.2011.10.010" target="_blank">10.1016/j.iref.2011.10.010</a></td>
<td>a478fdce</td>
<td>0.539952</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="du5ak60y">20</th>
<td>Leung_2008</td>
<td>China, Hong Kong and Taiwan, Health Systems of</td>
<td>Leung, G.M.; Wagstaff, A.; Lindelow, M.; Lu, J.R.</td>
<td>2008</td>
<td>2008-12-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7149405" target="_blank">PMC7149405</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/b978-012373960-5.00305-1" target="_blank">10.1016/b978-012373960-5.00305-1</a></td>
<td>du5ak60y</td>
<td>0.538846</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="lovqsclb">21</th>
<td>Chan_2018</td>
<td>Propinquity</td>
<td>Chan, Jeffrey K. H.</td>
<td>2018</td>
<td>2018-07-04</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7120533" target="_blank">PMC7120533</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/978-981-13-0308-1_3" target="_blank">10.1007/978-981-13-0308-1_3</a></td>
<td>lovqsclb</td>
<td>0.499027</td>
<td><a href="Topic_05.html#8plec8t4">Looker_2006</a></td>
<td><a href="Topic_01.html#dkncpt8g">Bickerstaff_2009</a></td>
</tr>
<tr>
<th id="aizy4n3v">22</th>
<td>Maier-Knapp_2011</td>
<td>Regional and interregional integrative dynamics of ASEAN and EU in response to the avian influenza</td>
<td>Maier-Knapp, Naila</td>
<td>2011</td>
<td>2011-01-26</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7104606" target="_blank">PMC7104606</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/s10308-011-0289-8" target="_blank">10.1007/s10308-011-0289-8</a></td>
<td>aizy4n3v</td>
<td>0.486267</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="jcsfbxgu">23</th>
<td>Vogel_2019</td>
<td>1 The nature of airports</td>
<td>Vogel, Hans-Arthur</td>
<td>2019</td>
<td>2019-12-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7150062" target="_blank">PMC7150062</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/b978-0-12-810528-3.00001-9" target="_blank">10.1016/b978-0-12-810528-3.00001-9</a></td>
<td>jcsfbxgu</td>
<td>0.467252</td>
<td></td>
<td><a href="Topic_01.html#fbi082jm">Vogel_2019</a></td>
</tr>
<tr>
<th id="h6wj7m8u">24</th>
<td>Keil_2007</td>
<td>Governing the Sick City: Urban Governance in the Age of Emerging Infectious Disease</td>
<td>Keil, Roger; Ali, Harris</td>
<td>2007</td>
<td>2007-12-07</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7159713" target="_blank">PMC7159713</a></td>
<td></td>
<td><a href="https://doi.org/10.1111/j.1467-8330.2007.00555.x" target="_blank">10.1111/j.1467-8330.2007.00555.x</a></td>
<td>h6wj7m8u</td>
<td>0.466911</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="myjbk7ye">25</th>
<td>Galea_2005</td>
<td>Cities and population health</td>
<td>Galea, Sandro; Freudenberg, Nicholas; Vlahov, David</td>
<td>2005</td>
<td>2005-03-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7117054" target="_blank">PMC7117054</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/15589671.0" target="_blank">15589671.0</a></td>
<td><a href="https://doi.org/10.1016/j.socscimed.2004.06.036" target="_blank">10.1016/j.socscimed.2004.06.036</a></td>
<td>myjbk7ye</td>
<td>0.460328</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="y7xbyhly">26</th>
<td>Joppe_2010</td>
<td>One Country's Transformation to Spa Destination: The Case of Canada</td>
<td>Joppe, Marion</td>
<td>2010</td>
<td>2010-12-31</td>
<td>PMC</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1375/jhtm.17.1.117" target="_blank">10.1375/jhtm.17.1.117</a></td>
<td>y7xbyhly</td>
<td>0.453725</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="fbi082jm">27</th>
<td>Vogel_2019</td>
<td>5 Operating environment</td>
<td>Vogel, Hans-Arthur</td>
<td>2019</td>
<td>2019-12-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7149739" target="_blank">PMC7149739</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/b978-0-12-810528-3.00005-6" target="_blank">10.1016/b978-0-12-810528-3.00005-6</a></td>
<td>fbi082jm</td>
<td>0.438757</td>
<td></td>
<td><a href="Topic_04.html#jcsfbxgu">Vogel_2019</a></td>
</tr>
<tr>
<th id="lzlqpbzz">28</th>
<td>Pérez_2016</td>
<td>Evolution of research in health geographics through the International Journal of Health Geographics (2002–2015)</td>
<td>Pérez, Sandra; Laperrière, Vincent; Borderon, Marion; Padilla, Cindy; Maignant, Gilles; Oliveau, Sébastien</td>
<td>2016</td>
<td>2016-01-20</td>
<td>COMM-USE</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4719657" target="_blank">PMC4719657</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/26790403.0" target="_blank">26790403.0</a></td>
<td><a href="https://doi.org/10.1186/s12942-016-0032-1" target="_blank">10.1186/s12942-016-0032-1</a></td>
<td>lzlqpbzz</td>
<td>0.428165</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="dr4ekix9">29</th>
<td>Kshetri_2008</td>
<td>3 Cyber-control in China</td>
<td>Kshetri, Nir</td>
<td>2008</td>
<td>2008-12-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7152031" target="_blank">PMC7152031</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/b978-1-84334-464-3.50003-9" target="_blank">10.1016/b978-1-84334-464-3.50003-9</a></td>
<td>dr4ekix9</td>
<td>0.402668</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a></td>
<td></td>
</tr>
<tr>
<th id="dvbua4pf">30</th>
<td>Nepal_2007</td>
<td>AIDS denial in Asia: Dimensions and roots</td>
<td>Nepal, Binod</td>
<td>2007</td>
<td>2007-12-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7132452" target="_blank">PMC7132452</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/17548124.0" target="_blank">17548124.0</a></td>
<td><a href="https://doi.org/10.1016/j.healthpol.2007.04.011" target="_blank">10.1016/j.healthpol.2007.04.011</a></td>
<td>dvbua4pf</td>
<td>0.402019</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="xsabhej9">31</th>
<td>Walsh_2018</td>
<td>Intelligence Tasking and Coordination</td>
<td>Walsh, Patrick F.</td>
<td>2018</td>
<td>2018-09-19</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7121912" target="_blank">PMC7121912</a></td>
<td></td>
<td><a href="https://doi.org/10.1057/978-1-137-51700-5_3" target="_blank">10.1057/978-1-137-51700-5_3</a></td>
<td>xsabhej9</td>
<td>0.395979</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="u7ixlsql">32</th>
<td>Yau_Y_2010</td>
<td>Domestic waste recycling, collective action and economic incentive: The case in Hong Kong</td>
<td>Yau, Yung</td>
<td>2010</td>
<td>2010-12-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7127748" target="_blank">PMC7127748</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/20619628.0" target="_blank">20619628.0</a></td>
<td><a href="https://doi.org/10.1016/j.wasman.2010.06.009" target="_blank">10.1016/j.wasman.2010.06.009</a></td>
<td>u7ixlsql</td>
<td>0.389560</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="176djnf5">33</th>
<td>Huynen_2005</td>
<td>The health impacts of globalisation: a conceptual framework</td>
<td>Huynen, Maud MTE; Martens, Pim; Hilderink, Henk BM</td>
<td>2005</td>
<td>2005-08-03</td>
<td>COMM-USE</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC1208931" target="_blank">PMC1208931</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/16078989.0" target="_blank">16078989.0</a></td>
<td><a href="https://doi.org/10.1186/1744-8603-1-14" target="_blank">10.1186/1744-8603-1-14</a></td>
<td>176djnf5</td>
<td>0.369483</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="qxdv88e6">34</th>
<td>Wallace_2009</td>
<td>Breeding Influenza: The Political Virology of Offshore Farming</td>
<td>Wallace, Robert G.</td>
<td>2009</td>
<td>2009-10-23</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7161869" target="_blank">PMC7161869</a></td>
<td></td>
<td><a href="https://doi.org/10.1111/j.1467-8330.2009.00702.x" target="_blank">10.1111/j.1467-8330.2009.00702.x</a></td>
<td>qxdv88e6</td>
<td>0.368132</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="i8dyu3i4">35</th>
<td>Wolf_2016</td>
<td>Rethinking Urban Epidemiology: Natures, Networks and Materialities</td>
<td>Wolf, Meike</td>
<td>2016</td>
<td>2016-11-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7165666" target="_blank">PMC7165666</a></td>
<td></td>
<td><a href="https://doi.org/10.1111/1468-2427.12381" target="_blank">10.1111/1468-2427.12381</a></td>
<td>i8dyu3i4</td>
<td>0.362487</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="ywb8n1zr">36</th>
<td>Chandrasekar_2014</td>
<td>Infectious Diseases Subspecialty: Declining Demand Challenges and Opportunities</td>
<td>Chandrasekar, Pranatharthi; Havlichek, Daniel; Johnson, Leonard B.</td>
<td>2014</td>
<td>2014-12-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7108050" target="_blank">PMC7108050</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/25148890.0" target="_blank">25148890.0</a></td>
<td><a href="https://doi.org/10.1093/cid/ciu656" target="_blank">10.1093/cid/ciu656</a></td>
<td>ywb8n1zr</td>
<td>0.349114</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="s4g3awyc">37</th>
<td>Christensen_2004</td>
<td>The Politics of SARS – Rational Responses or Ambiguity, Symbols and Chaos?</td>
<td>Christensen, Tom; Painter, Martin</td>
<td>2004</td>
<td>2004-12-31</td>
<td>PMC</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1016/s1449-4035(04)70031-4" target="_blank">10.1016/s1449-4035(04)70031-4</a></td>
<td>s4g3awyc</td>
<td>0.320588</td>
<td></td>
<td></td>
</tr>
</tbody>
</table>
</body>
</html>
|
roaminsight/roamresearch
|
docs/CORD19_topics/cord19-2020-04-24-v9/text-ents-en-75-t01-15/Topic_04.html
|
HTML
|
apache-2.0
| 25,172
|
# Scotty
Scotty is a high-performance, scalable collector for the
[tricorder](https://github.com/Symantec/tricorder) metrics publishing library.
Scotty provides a RESTful API to grab the latest polled metrics, and it can
push metrics to various persistent stores.
Please see the
[design document](https://docs.google.com/document/d/142Llj30LplgxWhOLOprqH59hS01EJ9iC1THV3no5oy0/pub)
and the
[online code documentation](https://godoc.org/github.com/Symantec/scotty)
for more information.
## Contributions
Prior to receiving information from any contributor, Symantec requires
that all contributors complete, sign, and submit the Symantec Personal
Contributor Agreement (SPCA). The purpose of the SPCA is to clearly
define the terms under which intellectual property has been
contributed to the project and thereby allow Symantec to defend the
project should there be a legal dispute regarding the software at some
future time. A signed SPCA is required to be on file before an
individual is given commit privileges to the Symantec open source
project. Please note that the privilege to commit to the project is
conditional and may be revoked by Symantec.
If you are employed by a corporation, a Symantec Corporate Contributor
Agreement (SCCA) is also required before you may contribute to the
project. If you are employed by a company, you may have signed an
employment agreement that assigns intellectual property ownership in
certain of your ideas or code to your company. We require a SCCA to
make sure that the intellectual property in your contribution is
clearly contributed to the Symantec open source project, even if that
intellectual property had previously been assigned by you.
Please complete the SPCA and, if required, the SCCA and return to
Symantec at:
Symantec Corporation
Legal Department
Attention: Product Legal Support Team
350 Ellis Street
Mountain View, CA 94043
Please be sure to keep a signed copy for your records.
## LICENSE
Copyright 2015 Symantec Corporation.
Licensed under the Apache License, Version 2.0 (the “License”); you
may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0 Unless required by
applicable law or agreed to in writing, software distributed under the
License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for
the specific language governing permissions and limitations under the
License.
## Building and Running Tests
When building scotty for the very first time, perform the following steps to
install the correct dependencies.
```
go get github.com/Symantec/scotty
cd $GOPATH/src/github.com/Symantec/scotty
make getdeps
```
From the top level directory of the scotty project:
To run all the tests
```
go test -v ./...
```
To rebuild after doing code changes
```
go install ./...
```
|
Symantec/scotty
|
README.md
|
Markdown
|
apache-2.0
| 2,933
|
/*
* Created on Jul 26, 2010
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright @2010-2011 the original author or authors.
*/
package org.fest.assertions.core;
/**
 * Assertions methods applicable to groups of objects (e.g. arrays or collections).
 * @param <S> the "self" type of this assertion class. Please read
 * "<a href="http://bit.ly/anMa4g" target="_blank">Emulating 'self types' using Java Generics to simplify fluent
 * API implementation</a>" for more details.
 *
 * @author Yvonne Wang
 * @author Alex Ruiz
 */
public interface ObjectEnumerableAssert<S> extends EnumerableAssert<S> {

  /**
   * Verifies that the actual group contains the given values, in any order.
   * @param values the given values.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group does not contain the given values.
   */
  S contains(Object... values);

  /**
   * Verifies that the actual group contains only the given values and nothing else, in any order.
   * @param values the given values.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group does not contain the given values, i.e. the actual group contains some
   * or none of the given values, or the actual group contains more values than the given ones.
   */
  S containsOnly(Object... values);

  /**
   * Verifies that the actual group contains the given sequence, without any other values between them.
   * @param sequence the sequence of objects to look for.
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the given array is {@code null}.
   * @throws AssertionError if the actual group does not contain the given sequence.
   */
  S containsSequence(Object... sequence);

  /**
   * Verifies that the actual group does not contain the given values.
   * @param values the given values.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group contains any of the given values.
   */
  S doesNotContain(Object... values);

  /**
   * Verifies that the actual group does not contain duplicates.
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group contains duplicates.
   */
  S doesNotHaveDuplicates();

  /**
   * Verifies that the actual group starts with the given sequence of objects, without any other objects between them.
   * Similar to <code>{@link #containsSequence(Object...)}</code>, but it also verifies that the first element in the
   * sequence is also first element of the actual group.
   * @param sequence the sequence of objects to look for.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group does not start with the given sequence of objects.
   */
  S startsWith(Object... sequence);

  /**
   * Verifies that the actual group ends with the given sequence of objects, without any other objects between them.
   * Similar to <code>{@link #containsSequence(Object...)}</code>, but it also verifies that the last element in the
   * sequence is also last element of the actual group.
   * @param sequence the sequence of objects to look for.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group does not end with the given sequence of objects.
   */
  S endsWith(Object... sequence);

  /**
   * Verifies that the actual group contains at least a null element.
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group does not contain a null element.
   */
  S containsNull();

  /**
   * Verifies that the actual group does not contain null elements.
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual group is {@code null}.
   * @throws AssertionError if the actual group contains a null element.
   */
  S doesNotContainNull();
}
|
nicstrong/fest-assertions-android
|
fest-assert-android/src/main/java/org/fest/assertions/core/ObjectEnumerableAssert.java
|
Java
|
apache-2.0
| 5,621
|
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.file;
import org.gradle.api.InvalidUserDataException;
import org.gradle.api.PathValidation;
import org.gradle.api.tasks.util.PatternSet;
import org.gradle.internal.Factory;
import org.gradle.internal.FileUtils;
import org.gradle.internal.exceptions.DiagnosticsVisitor;
import org.gradle.internal.typeconversion.NotationParser;
import org.gradle.internal.typeconversion.UnsupportedNotationException;
import org.gradle.util.DeferredUtil;
import javax.annotation.Nullable;
import java.io.File;
import java.net.URI;
public abstract class AbstractFileResolver implements FileResolver {
// Parses user-supplied path notations into either a File or a URI; backed by
// FileOrUriNotationConverter. NOTE(review): the exact set of supported
// notations is defined by that converter — confirm there.
private final NotationParser<Object, Object> fileNotationParser;
// Factory shared with every resolver derived from this one (withBaseDir/newResolver).
private final Factory<PatternSet> patternSetFactory;

/**
 * Creates a resolver whose notation parsing is provided by
 * {@link FileOrUriNotationConverter#parser()}.
 *
 * @param patternSetFactory factory handed to derived base-dir resolvers
 */
protected AbstractFileResolver(Factory<PatternSet> patternSetFactory) {
    this.fileNotationParser = FileOrUriNotationConverter.parser();
    this.patternSetFactory = patternSetFactory;
}
public FileResolver withBaseDir(Object path) {
return new BaseDirFileResolver(resolve(path), patternSetFactory);
}
@Override
public FileResolver newResolver(File baseDir) {
return new BaseDirFileResolver(baseDir, patternSetFactory);
}
@Override
public File resolve(Object path) {
return resolve(path, PathValidation.NONE);
}
@Override
public NotationParser<Object, File> asNotationParser() {
return new NotationParser<Object, File>() {
@Override
public File parseNotation(Object notation) throws UnsupportedNotationException {
// TODO Further differentiate between unsupported notation errors and others (particularly when we remove the deprecated 'notation.toString()' resolution)
return resolve(notation, PathValidation.NONE);
}
@Override
public void describe(DiagnosticsVisitor visitor) {
visitor.candidate("Anything that can be converted to a file, as per Project.file()");
}
};
}
@Override
public File resolve(Object path, PathValidation validation) {
File file = doResolve(path);
file = FileUtils.normalize(file);
validate(file, validation);
return file;
}
@Override
public URI resolveUri(Object path) {
return convertObjectToURI(path);
}
protected abstract File doResolve(Object path);
protected URI convertObjectToURI(Object path) {
Object object = DeferredUtil.unpack(path);
Object converted = fileNotationParser.parseNotation(object);
if (converted instanceof File) {
return resolve(converted).toURI();
}
return (URI) converted;
}
@Nullable
protected File convertObjectToFile(Object path) {
Object object = DeferredUtil.unpack(path);
if (object == null) {
return null;
}
Object converted = fileNotationParser.parseNotation(object);
if (converted instanceof File) {
return (File) converted;
}
throw new InvalidUserDataException(String.format("Cannot convert URL '%s' to a file.", converted));
}
protected void validate(File file, PathValidation validation) {
switch (validation) {
case NONE:
break;
case EXISTS:
if (!file.exists()) {
throw new InvalidUserDataException(String.format("File '%s' does not exist.", file));
}
break;
case FILE:
if (!file.exists()) {
throw new InvalidUserDataException(String.format("File '%s' does not exist.", file));
}
if (!file.isFile()) {
throw new InvalidUserDataException(String.format("File '%s' is not a file.", file));
}
break;
case DIRECTORY:
if (!file.exists()) {
throw new InvalidUserDataException(String.format("Directory '%s' does not exist.", file));
}
if (!file.isDirectory()) {
throw new InvalidUserDataException(String.format("Directory '%s' is not a directory.", file));
}
break;
}
}
@Override
public Factory<PatternSet> getPatternSetFactory() {
return patternSetFactory;
}
}
|
robinverduijn/gradle
|
subprojects/file-collections/src/main/java/org/gradle/api/internal/file/AbstractFileResolver.java
|
Java
|
apache-2.0
| 5,052
|
namespace NServiceBus.Facade.Web.Areas.HelpPage.ModelDescriptions
{
    /// <summary>
    /// Describes a key/value pair type for the auto-generated Web API help pages.
    /// </summary>
    public class KeyValuePairModelDescription : ModelDescription
    {
        /// <summary>Model description of the pair's key type.</summary>
        public ModelDescription KeyModelDescription { get; set; }
        /// <summary>Model description of the pair's value type.</summary>
        public ModelDescription ValueModelDescription { get; set; }
    }
}
|
PeteW/NServiceBusFacade
|
NServiceBus.Facade.Web/Areas/HelpPage/ModelDescriptions/KeyValuePairModelDescription.cs
|
C#
|
apache-2.0
| 281
|
/*
Copyright 2018 New Vector Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import React from 'react';
import ReactDOM from 'react-dom';
import { throttle } from "lodash";
import { isNullOrUndefined } from "matrix-js-sdk/src/utils";
import dis from '../../../dispatcher/dispatcher';
import MatrixClientContext from "../../../contexts/MatrixClientContext";
import { MatrixClientPeg } from "../../../MatrixClientPeg";
import { replaceableComponent } from "../../../utils/replaceableComponent";
import { ActionPayload } from "../../../dispatcher/payloads";
// Builds the persistence key used to identify a widget's persisted DOM container.
export const getPersistKey = (appId: string) => `widget_${appId}`;
// Shamelessly ripped off Modal.js. There's probably a better way
// of doing reusable widgets like dialog boxes & menus where we go and
// pass in a custom control as the actual body.
// Looks up an existing persistence container element by its DOM id
// (null when no such element exists).
function getContainer(containerId: string): HTMLDivElement {
    const element = document.getElementById(containerId);
    return element as HTMLDivElement;
}
function getOrCreateContainer(containerId: string): HTMLDivElement {
let container = getContainer(containerId);
if (!container) {
container = document.createElement("div");
container.id = containerId;
document.body.appendChild(container);
}
return container;
}
interface IProps {
    // Unique identifier for this PersistedElement instance
    // Any PersistedElements with the same persistKey will use
    // the same DOM container.
    persistKey: string;
    // z-index for the element. Defaults to 9.
    zIndex?: number;
    // Inline styles applied to the wrapper div rendered around the children.
    style?: React.StyleHTMLAttributes<HTMLDivElement>;
}
/**
* Class of component that renders its children in a separate ReactDOM virtual tree
* in a container element appended to document.body.
*
* This prevents the children from being unmounted when the parent of PersistedElement
* unmounts, allowing them to persist.
*
* When PE is unmounted, it hides the children using CSS. When mounted or updated, the
* children are made visible and are positioned into a div that is given the same
* bounding rect as the parent of PE.
*/
@replaceableComponent("views.elements.PersistedElement")
export default class PersistedElement extends React.Component<IProps> {
    // Tracks size changes of the in-tree placeholder so the out-of-tree child can follow it.
    private resizeObserver: ResizeObserver;
    // Handle returned by dis.register, used to unregister on unmount.
    private dispatcherRef: string;
    // The in-tree placeholder div whose bounding rect the child mirrors.
    private childContainer: HTMLDivElement;
    // The out-of-tree div (rendered into document.body) that actually holds the children.
    private child: HTMLDivElement;
    constructor(props: IProps) {
        super(props);
        this.resizeObserver = new ResizeObserver(this.repositionChild);
        // Annoyingly, a resize observer is insufficient, since we also care
        // about when the element moves on the screen without changing its
        // dimensions. Doesn't look like there's a ResizeObserver equivalent
        // for this, so we bodge it by listening for document resize and
        // the timeline_resize action.
        window.addEventListener('resize', this.repositionChild);
        this.dispatcherRef = dis.register(this.onAction);
    }
    /**
     * Removes the DOM elements created when a PersistedElement with the given
     * persistKey was mounted. The DOM elements will be re-added if another
     * PersistedElement is mounted in the future.
     *
     * @param {string} persistKey Key used to uniquely identify this PersistedElement
     */
    public static destroyElement(persistKey: string): void {
        const container = getContainer('mx_persistedElement_' + persistKey);
        if (container) {
            container.remove();
        }
    }
    // Whether a persisted container for this key currently exists in the DOM.
    static isMounted(persistKey) {
        return Boolean(getContainer('mx_persistedElement_' + persistKey));
    }
    // Ref callback for the in-tree placeholder: (re)wires the ResizeObserver
    // as the placeholder mounts/unmounts.
    private collectChildContainer = (ref: HTMLDivElement): void => {
        if (this.childContainer) {
            this.resizeObserver.unobserve(this.childContainer);
        }
        this.childContainer = ref;
        if (ref) {
            this.resizeObserver.observe(ref);
        }
    };
    // Ref callback for the out-of-tree child: syncs its position/visibility once attached.
    private collectChild = (ref: HTMLDivElement): void => {
        this.child = ref;
        this.updateChild();
    };
    public componentDidMount(): void {
        this.updateChild();
        this.renderApp();
    }
    public componentDidUpdate(): void {
        this.updateChild();
        this.renderApp();
    }
    public componentWillUnmount(): void {
        // Hide (not destroy) the child so it persists; tear down all listeners.
        this.updateChildVisibility(this.child, false);
        this.resizeObserver.disconnect();
        window.removeEventListener('resize', this.repositionChild);
        dis.unregister(this.dispatcherRef);
    }
    private onAction = (payload: ActionPayload): void => {
        if (payload.action === 'timeline_resize') {
            this.repositionChild();
        } else if (payload.action === 'logout') {
            // On logout the persisted content must be fully destroyed, not just hidden.
            PersistedElement.destroyElement(this.props.persistKey);
        }
    };
    private repositionChild = (): void => {
        this.updateChildPosition(this.child, this.childContainer);
    };
    private updateChild(): void {
        this.updateChildPosition(this.child, this.childContainer);
        this.updateChildVisibility(this.child, true);
    }
    // Renders the children into a separate ReactDOM root under document.body,
    // keyed by persistKey so remounts reuse the same container.
    private renderApp(): void {
        const content = <MatrixClientContext.Provider value={MatrixClientPeg.get()}>
            <div ref={this.collectChild} style={this.props.style}>
                { this.props.children }
            </div>
        </MatrixClientContext.Provider>;
        ReactDOM.render(content, getOrCreateContainer('mx_persistedElement_'+this.props.persistKey));
    }
    private updateChildVisibility(child: HTMLDivElement, visible: boolean): void {
        if (!child) return;
        child.style.display = visible ? 'block' : 'none';
    }
    // Throttled to at most once per ~frame (16ms); copies the placeholder's
    // bounding rect onto the absolutely-positioned child.
    private updateChildPosition = throttle((child: HTMLDivElement, parent: HTMLDivElement): void => {
        if (!child || !parent) return;
        const parentRect = parent.getBoundingClientRect();
        Object.assign(child.style, {
            zIndex: isNullOrUndefined(this.props.zIndex) ? 9 : this.props.zIndex,
            position: 'absolute',
            top: parentRect.top + 'px',
            left: parentRect.left + 'px',
            width: parentRect.width + 'px',
            height: parentRect.height + 'px',
        });
    }, 16, { trailing: true, leading: true });
    public render(): JSX.Element {
        return <div ref={this.collectChildContainer} />;
    }
}
|
matrix-org/matrix-react-sdk
|
src/components/views/elements/PersistedElement.tsx
|
TypeScript
|
apache-2.0
| 6,770
|
# Toy module used to exercise refactoring tooling.
module Altera
  class Bulgaria
    attr_accessor :attr, :attr2

    # Computes two sums; the value of the last expression (4) is returned.
    def method_um
      first_sum = 1 + 1
      second_sum = 2 + 2
    end
  end
end
|
estevao2012/defRefactorRuby
|
toys/ProjetoAula/versions/v1/altera/bulgaria.rb
|
Ruby
|
apache-2.0
| 129
|
<!--<p>Details</p>-->
<div style="width:100%;height:100%;background:white">
<div style="width:100%;padding:5px;background:white">
<md-list-item>
<!--<ng-avatar initials="{{initial}}" round-shape="true" bg-color="#3333cc" text-color="white" width="40"></ng-avatar>-->
<h1 style="color:black">{{employeeinfo[0].name}}</h1>
</md-list-item>
</div>
<md-tabs md-border-bottom style="width:100%;height:100%;min-height: 100%;">
<md-tab label="Profile">
<md-content class="md-padding" style="background: #f2f2f4">
<md-card style="width: 99%;padding: 30px;">
<h3 style="padding-bottom: 10px;">Employee Details</h3>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">First Name</label>
<input ng-model="fname" name="fname" ng-readonly="true" required>
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Last Name</label>
<input ng-model="lname" name="lname" ng-readonly="true" required>
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Email</label>
<input ng-model="email" name="email" ng-readonly="true" required>
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">Designation</label>
<input ng-model="job_title" name="job_title" ng-readonly="true" required>
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Joining Date</label>
<input ng-model="joining_date" name="joining_date" ng-readonly="true" required>
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">Reporting To</label>
<!-- <input ng-model="reporting_to" name="reporting_to" ng-readonly="true" required>-->
<md-select name="reporting_to" ng-model="reporting_to" placeholder="Reporting Manager" required>
<md-option value="NA">NA</md-option>
<md-option ng-repeat="s in reportingManagers" value="{{s.email}}">
{{s.name}}
</md-option>
</md-select>
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Employee Type</label>
<!--<input ng-model="employee_type" name="employee_type" ng-readonly="true" required>-->
<md-select name="employee_type" ng-model="employee_type" placeholder="Employee Type" required>
<md-option value="NA">NA</md-option>
<md-option value="Contractor">Contractor</md-option>
<md-option value="Full Time">Full Time</md-option>
<md-option value="Intern">Intern</md-option>
<md-option value="Part Time">Part Time</md-option>
</md-select>
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">Employee Status</label>
<input ng-model="employee_status" name="employee_status" ng-readonly="true" required>
</md-input-container>
</div>
<div style="margin-bottom: 28px;">
<label style="font-size: 13px;font-weight: 700;color: rgba(0,0,0,0.55);">Bio</label>
<trix-editor ng-model-options="{ updateOn: 'blur' }" spellcheck="false" class="trix-content" ng-model="trix" angular-trix trix-initialize="trixInitialize(e, editor);" trix-change="trixChange(e, editor);" trix-selection-change="trixSelectionChange(e, editor);" trix-focus="trixFocus(e, editor);" trix-blur="trixBlur(e, editor);" trix-file-accept="trixFileAccept(e, editor);" trix-attachment-add="trixAttachmentAdd(e, editor);" trix-attachment-remove="trixAttachmentRemove(e, editor);"></trix-editor>
</div>
<div layout-gt-sm="row">
<md-input-container flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;color: rgba(0,0,0,0.65)">Skill Set</label>
<md-chips ng-model="selectedSkills" md-autocomplete-snap
md-transform-chip="transformChip($chip)"
md-require-match="autocompleteDemoRequireMatch">
<md-autocomplete
md-selected-item="selectedItem"
md-search-text="searchText"
md-items="item in querySearch(searchText)"
md-item-text="item.name"
placeholder="Search for skills">
<span md-highlight-text="searchText">{{item.name}}</span>
</md-autocomplete>
<md-chip-template>
<span>
<strong>{{$chip.name}}</strong>
</span>
</md-chip-template>
</md-chips>
</md-input-container>
</div>
<md-button class="md-raised md-primary" ng-click="saveProfile()" style="width: 10%;">Save</md-button>
</md-card>
</md-content>
</md-tab>
<md-tab label="Personal Information">
<md-content class="md-padding" style="background: #f2f2f4">
<md-card style="width: 99%;padding: 30px;">
<h3 style="padding-bottom: 10px;">Personal Details</h3>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
                        <label style="font-size: 16px;font-weight: 700;margin-right:28px">Date of Birth</label>
<md-datepicker onkeydown="return false" style="float: left;margin-left: -15px;" name="dob" ng-model="dob" md-placeholder="Birth Date"></md-datepicker>
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">Marital Status</label>
<md-select name="marital_s" ng-model="marital_s" placeholder="Marital Status">
<md-option value="Married">Married</md-option>
<md-option value="Single">Single</md-option>
<md-option value="Partnership">Partnership</md-option>
<md-option value="Widowed">Widowed</md-option>
<md-option value="Divorced">Divorced</md-option>
</md-select>
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Mobile Number</label>
<input ng-model="mobile" name="mobile" type="number" ng-pattern="/^[7-9][0-9]{9}$/">
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Landline</label>
<input ng-model="phone" name="phone" type="number">
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Gender</label>
<!--<input ng-model="employee_type" name="employee_type" ng-readonly="true" required>-->
<md-select name="gender" ng-model="gender" placeholder="Gender">
<md-option value="Male">Male</md-option>
<md-option value="Female">Female</md-option>
</md-select>
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Address</label>
<input ng-model="address" name="address">
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">City</label>
<input ng-model="city" name="city">
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">State</label>
<input ng-model="state" name="state">
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">Postal Code</label>
<input ng-model="postal_code" name="postal_code" type="number">
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Country</label>
<input ng-model="country" name="country">
</md-input-container>
</div>
<md-button class="md-raised md-primary" ng-click="saveProfile()" style="width: 10%;">Save</md-button>
</md-card>
</md-content>
</md-tab>
<md-tab label="Emergency Contact">
<md-content class="md-padding" style="background: #f2f2f4">
<md-card style="width: 99%;padding: 30px;">
<h3 style="padding-bottom: 10px;">Emergency Contact</h3>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">First Name</label>
<input ng-model="emergency_fname" name="emergency_fname">
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Last Name</label>
<input ng-model="emergency_lname" name="emergency_lname">
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Relationship</label>
<input ng-model="emergency_relation" name="emergency_relation">
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Mobile Number</label>
<input ng-model="emergency_mbno" name="emergency_mbno" type="number">
</md-input-container>
</div>
<md-button class="md-raised md-primary" ng-click="saveProfile()" style="width: 10%;">Save</md-button>
</md-card>
</md-content>
</md-tab>
<md-tab label="Bank Account Information">
<md-content class="md-padding" style="background: #f2f2f4">
<md-card style="width: 99%;padding: 30px;">
<h3 style="padding-bottom: 10px;">Account Information</h3>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700;">Bank Name</label>
<input ng-model="bank_name" name="bank_name">
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Account Type</label>
<!--<input ng-model="employee_type" name="employee_type" ng-readonly="true" required>-->
<md-select name="acc_type" ng-model="acc_type" placeholder="Account Type">
<md-option value="Current">Current</md-option>
<md-option value="Saving">Saving</md-option>
<md-option value="Other">Other</md-option>
</md-select>
</md-input-container>
</div>
<div layout-gt-sm="row" style="height:80px">
<md-input-container style="width: 25%;max-width: 25%;" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">Account Number</label>
<input ng-model="acc_number" name="acc_number" type="number">
</md-input-container>
<md-input-container style="width: 25%;max-width: 25%;margin-left:5%" flex-gt-sm>
<label style="font-size: 16px;font-weight: 700">IFSC Code</label>
<input ng-model="ifsc_code" name="ifsc_code">
</md-input-container>
</div>
<md-button class="md-raised md-primary" ng-click="saveProfile()" style="width: 10%;">Save</md-button>
</md-card>
</md-content>
</md-tab>
<md-tab label="Project Information">
<md-content class="md-padding" style="background: #f2f2f4">
<timeline>
<timeline-event ng-repeat="event in events" side="alternate" ng-click="clickEvent()">
<timeline-badge class="{{event.badgeClass}}">
<p style="font-size: 14px; margin: 0px;padding: 0px;">2014</p>
</timeline-badge>
<timeline-panel class="{{event.badgeClass}}">
<timeline-heading>
<h4>{{event.title}}</h4>
</timeline-heading>
</timeline-panel>
</timeline-event>
</timeline>
</md-content>
</md-tab>
</md-tabs>
</div>
|
Hemanth72/HRMS
|
app_client/empdetails/details.html
|
HTML
|
apache-2.0
| 13,812
|
// Doxygen-generated navigation data for one source directory (org.onosproject.cli.cfg).
// Each entry has the shape: [ fileName, fileDocPage, classList | null ], where classList
// entries are [ className, classDocPage, classMembersPage ].
// Do not edit by hand; regenerated by the documentation build.
var dir_868198085b236c6136218653797759d1 =
[
    [ "ComponentConfigCommand.java", "ComponentConfigCommand_8java.html", [
      [ "ComponentConfigCommand", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1ComponentConfigCommand.html", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1ComponentConfigCommand" ]
    ] ],
    [ "ComponentConfigCommandCompleter.java", "ComponentConfigCommandCompleter_8java.html", [
      [ "ComponentConfigCommandCompleter", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1ComponentConfigCommandCompleter.html", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1ComponentConfigCommandCompleter" ]
    ] ],
    [ "ComponentNameCompleter.java", "ComponentNameCompleter_8java.html", [
      [ "ComponentNameCompleter", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1ComponentNameCompleter.html", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1ComponentNameCompleter" ]
    ] ],
    [ "ComponentPropertyNameCompleter.java", "ComponentPropertyNameCompleter_8java.html", [
      [ "ComponentPropertyNameCompleter", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1ComponentPropertyNameCompleter.html", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1ComponentPropertyNameCompleter" ]
    ] ],
    [ "NetworkConfigCommand.java", "NetworkConfigCommand_8java.html", [
      [ "NetworkConfigCommand", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1NetworkConfigCommand.html", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1NetworkConfigCommand" ]
    ] ],
    [ "NetworkConfigRegistryCommand.java", "NetworkConfigRegistryCommand_8java.html", [
      [ "NetworkConfigRegistryCommand", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1NetworkConfigRegistryCommand.html", "classorg_1_1onosproject_1_1cli_1_1cfg_1_1NetworkConfigRegistryCommand" ]
    ] ],
    [ "package-info.java", "cli_2src_2main_2java_2org_2onosproject_2cli_2cfg_2package-info_8java.html", null ]
];
|
onosfw/apis
|
onos/apis/dir_868198085b236c6136218653797759d1.js
|
JavaScript
|
apache-2.0
| 1,817
|
---
external help file: Microsoft.Azure.Commands.KeyVault.dll-Help.xml
ms.assetid: A7C287C4-E9FD-407A-91BD-EFA17C33FC8B
online version: http://go.microsoft.com/fwlink/?LinkID=690161
schema: 2.0.0
---
# Get-AzureRmKeyVault
## SYNOPSIS
Gets key vaults.
## SYNTAX
### GetVaultByName
```
Get-AzureRmKeyVault [-VaultName] <String> [[-ResourceGroupName] <String>] [<CommonParameters>]
```
### ByDeletedVault
```
Get-AzureRmKeyVault [-VaultName] <String> -Location <String> [-InRemovedState] [<CommonParameters>]
```
### ListVaultsByResourceGroup
```
Get-AzureRmKeyVault [-ResourceGroupName] <String> [<CommonParameters>]
```
### ListAllDeletedVaultsInSubscription
```
Get-AzureRmKeyVault [-InRemovedState] [<CommonParameters>]
```
### ListAllVaultsInSubscription
```
Get-AzureRmKeyVault [-Tag <Hashtable>] [<CommonParameters>]
```
## DESCRIPTION
The **Get-AzureRmKeyVault** cmdlet gets information about the key vaults in a subscription.
You can view all key vaults instances in a subscription, or filter your results by a resource group or a particular key vault.
Note that although specifying the resource group is optional for this cmdlet when you get a single key vault, you should do so for better performance.
## EXAMPLES
### Example 1: Get all key vaults in your current subscription
```
PS C:\>Get-AzureRMKeyVault
```
This command gets all the key vaults in your current subscription.
### Example 2: Get a specific key vault
```
PS C:\>$MyVault = Get-AzureRMKeyVault -VaultName 'Contoso03Vault'
```
This command gets the key vault named Contoso03Vault in your current subscription, and then stores it in the $MyVault variable.
You can inspect the properties of $MyVault to get details about the key vault.
### Example 3: Get key vaults in a resource group
```
PS C:\>Get-AzureRmKeyVault -ResourceGroupName 'ContosoPayRollResourceGroup'
```
This command gets all the key vaults in the resource group named ContosoPayRollResourceGroup.
### Example 4: Get all deleted key vaults in your current subscription
```
PS C:\>Get-AzureRmKeyVault -InRemovedState
```
This command gets all the deleted key vaults in your current subscription.
### Example 5: Get a deleted key vault
```
PS C:\>Get-AzureRMKeyVault -VaultName 'Contoso03Vault' -Location 'eastus2' -InRemovedState
```
This command gets the deleted key vault information named Contoso03Vault in your current subscription and in eastus2 region.
## PARAMETERS
### -InRemovedState
Specifies whether to show the previously deleted vaults in the output.

```yaml
Type: SwitchParameter
Parameter Sets: ByDeletedVault, ListAllDeletedVaultsInSubscription
Aliases:
Required: True
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -Location
The location of the deleted vault.

```yaml
Type: String
Parameter Sets: ByDeletedVault
Aliases:
Required: True
Position: Named
Default value: None
Accept pipeline input: True (ByPropertyName)
Accept wildcard characters: False
```
### -ResourceGroupName
Specifies the name of the resource group associated with the key vault or key vaults being queried.
```yaml
Type: String
Parameter Sets: GetVaultByName
Aliases:
Required: False
Position: 1
Default value: None
Accept pipeline input: True (ByPropertyName)
Accept wildcard characters: False
```
```yaml
Type: String
Parameter Sets: ListVaultsByResourceGroup
Aliases:
Required: True
Position: 1
Default value: None
Accept pipeline input: True (ByPropertyName)
Accept wildcard characters: False
```
### -Tag
Specifies the key and value of the specified tag to filter the list of key vaults by hash table.
```yaml
Type: Hashtable
Parameter Sets: ListAllVaultsInSubscription
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: True (ByPropertyName)
Accept wildcard characters: False
```
### -VaultName
Specifies the name of the key vault.
```yaml
Type: String
Parameter Sets: GetVaultByName, ByDeletedVault
Aliases:
Required: True
Position: 0
Default value: None
Accept pipeline input: True (ByPropertyName)
Accept wildcard characters: False
```
### CommonParameters
This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see about_CommonParameters (http://go.microsoft.com/fwlink/?LinkID=113216).
## INPUTS
## OUTPUTS
## NOTES
## RELATED LINKS
[New-AzureRmKeyVault](./New-AzureRmKeyVault.md)
[Remove-AzureRmKeyVault](./Remove-AzureRmKeyVault.md)
|
krkhan/azure-powershell
|
src/ResourceManager/KeyVault/Commands.KeyVault/help/Get-AzureRmKeyVault.md
|
Markdown
|
apache-2.0
| 4,598
|
/**
* Created by Vayne-Lover on 5/8/17.
*/
public class SailBoat extends Boat {
public void move() {
System.out.println("Sail!");
}
}
|
Vayne-Lover/Java
|
head-first-java/chapter7/src/SailBoat.java
|
Java
|
apache-2.0
| 153
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class com.google.zxing.PlanarYUVLuminanceSource (ZXing 3.3.3 API)</title>
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class com.google.zxing.PlanarYUVLuminanceSource (ZXing 3.3.3 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../com/google/zxing/PlanarYUVLuminanceSource.html" title="class in com.google.zxing">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?com/google/zxing/class-use/PlanarYUVLuminanceSource.html" target="_top">Frames</a></li>
<li><a href="PlanarYUVLuminanceSource.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class com.google.zxing.PlanarYUVLuminanceSource" class="title">Uses of Class<br>com.google.zxing.PlanarYUVLuminanceSource</h2>
</div>
<div class="classUseContainer">No usage of com.google.zxing.PlanarYUVLuminanceSource</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../com/google/zxing/PlanarYUVLuminanceSource.html" title="class in com.google.zxing">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?com/google/zxing/class-use/PlanarYUVLuminanceSource.html" target="_top">Frames</a></li>
<li><a href="PlanarYUVLuminanceSource.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2007–2018. All rights reserved.</small></p>
</body>
</html>
|
daverix/zxing
|
docs/apidocs/com/google/zxing/class-use/PlanarYUVLuminanceSource.html
|
HTML
|
apache-2.0
| 4,480
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.