repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
julza03/sirius-solver | src/POINT_INTERIEUR/pi_sos1s2.c | <filename>src/POINT_INTERIEUR/pi_sos1s2.c
/*
** Copyright 2007-2018 RTE
** Author: <NAME>
**
** This file is part of Sirius_Solver.
** This program and the accompanying materials are made available under the
** terms of the Eclipse Public License 2.0 which is available at
** http://www.eclipse.org/legal/epl-2.0.
**
** This Source Code may also be made available under the following Secondary
** Licenses when the conditions for such availability set forth in the Eclipse
** Public License, v. 2.0 are satisfied: GNU General Public License, version 3
** or later, which is available at <http://www.gnu.org/licenses/>.
**
** SPDX-License-Identifier: EPL-2.0 OR GPL-3.0
*/
/***********************************************************************
FONCTION: A chaque iteration de point interiuer, calcul de delta S1
et delta S2 (ce calcul est fait apres celui de delta U).
AUTEUR: <NAME>
************************************************************************/
# include "pi_sys.h"
# include "pi_fonctions.h"
# include "pi_define.h"
/*-------------------------------------------------------------------------*/
/*
 * PI_Sos1s2: at each interior-point iteration, computes DeltaS1 and DeltaS2,
 * the steps on the dual slack variables attached to the lower bound (S1) and
 * upper bound (S2) of every variable. Must be called after the primal step
 * DeltaU has been computed, since every formula below reads DeltaU.
 *
 * Variable types (French identifiers): BORNEE = bounded on both sides,
 * BORNEE_INFERIEUREMENT = lower-bounded only, BORNEE_SUPERIEUREMENT =
 * upper-bounded only; other types get no slack update.
 */
void PI_Sos1s2( PROBLEME_PI * Pi )
{
int i; int NombreDeVariables; char * TypeDeVariable; double * DeltaS1;
double * DeltaS2; double * UnSurUkMoinsUmin ; double * UnSurUmaxMoinsUk;
double * S1; double * S2; double * DeltaU; double * DeltaUDeltaS1;
double * DeltaUDeltaS2; double Muk; char Type;
/* Local aliases for the arrays held in the problem structure.
   UnSurUkMoinsUmin = 1/(Uk - Umin), UnSurUmaxMoinsUk = 1/(Umax - Uk). */
NombreDeVariables = Pi->NombreDeVariables;
TypeDeVariable = Pi->TypeDeVariable;
DeltaS1 = Pi->DeltaS1;
DeltaS2 = Pi->DeltaS2;
UnSurUkMoinsUmin = Pi->UnSurUkMoinsUmin;
UnSurUmaxMoinsUk = Pi->UnSurUmaxMoinsUk;
S1 = Pi->S1;
S2 = Pi->S2;
DeltaU = Pi->DeltaU;
DeltaUDeltaS1 = Pi->DeltaUDeltaS1;
DeltaUDeltaS2 = Pi->DeltaUDeltaS2;
/* On the first iteration, start from zero increments before either branch
   below overwrites the entries it is responsible for. */
if ( Pi->NumeroDIteration <= 1 ) {
for ( i = 0 ; i < NombreDeVariables ; i++ ) {
DeltaS1[i] = 0.;
DeltaS2[i] = 0.;
}
}
/* Affine (predictor-style) step: no barrier term, the slack step is driven
   only by the current slack value and the primal step DeltaU. */
if ( Pi->TypeDIteration == AFFINE ) {
for ( i = 0 ; i < NombreDeVariables ; i++ ) {
Type = TypeDeVariable[i];
if ( Type == BORNEE ) {
DeltaS1[i] = ( - UnSurUkMoinsUmin[i] * S1[i] * DeltaU[i] ) - S1[i];
DeltaS2[i] = ( UnSurUmaxMoinsUk[i] * S2[i] * DeltaU[i] ) - S2[i];
}
else if ( Type == BORNEE_INFERIEUREMENT ) {
/* Only the lower-bound slack exists for this variable. */
DeltaS1[i] = ( - UnSurUkMoinsUmin[i] * S1[i] * DeltaU[i] ) - S1[i];
}
else if ( Type == BORNEE_SUPERIEUREMENT ) {
/* Only the upper-bound slack exists for this variable. */
DeltaS2[i] = ( UnSurUmaxMoinsUk[i] * S2[i] * DeltaU[i] ) - S2[i];
}
}
}
else {
/* Corrector-style step: includes the barrier parameter Muk and the
   second-order cross terms DeltaU*DeltaS computed elsewhere. */
Muk = Pi->Muk;
for ( i = 0 ; i < NombreDeVariables ; i++ ) {
Type = TypeDeVariable[i];
if ( Type == BORNEE ) {
DeltaS1[i] = UnSurUkMoinsUmin[i] * ( Muk - ( S1[i] * DeltaU[i] ) -
DeltaUDeltaS1[i] ) - S1[i];
DeltaS2[i] = UnSurUmaxMoinsUk[i] * ( Muk + ( S2[i] * DeltaU[i] ) +
DeltaUDeltaS2[i] ) - S2[i];
}
else if ( Type == BORNEE_INFERIEUREMENT ) {
DeltaS1[i] = UnSurUkMoinsUmin[i] * ( Muk - ( S1[i] * DeltaU[i] ) -
DeltaUDeltaS1[i] ) - S1[i];
}
else if ( Type == BORNEE_SUPERIEUREMENT ) {
DeltaS2[i] = UnSurUmaxMoinsUk[i] * ( Muk + ( S2[i] * DeltaU[i] ) +
DeltaUDeltaS2[i] ) - S2[i];
}
}
}
return;
}
|
paulusrobin/leaf-utilities | appRunner/worker/base.go | <filename>appRunner/worker/base.go
package leafWorker
import (
"context"
leafLogger "github.com/paulusrobin/leaf-utilities/logger/logger"
"os"
"time"
)
type (
	// IRunner is the contract for a background worker that runs until a
	// value is received on the provided OS-signal channel.
	IRunner interface {
		// OperationName returns the name identifying the worker's operation.
		OperationName() string
		// Serve starts the worker; implementations stop when sig fires.
		Serve(sig chan os.Signal, logger leafLogger.Logger)
	}
	// MiddlewareHandlerFunc is the unit of work executed by a Runner.
	MiddlewareHandlerFunc func(ctx context.Context) error
	// MiddlewareFunc decorates a handler; it also receives the runner so a
	// wrapper can inspect the worker it decorates.
	MiddlewareFunc func(MiddlewareHandlerFunc, IRunner) MiddlewareHandlerFunc
	// Runner is a periodic worker that executes fn once per interval,
	// wrapped in middlewares.
	Runner struct {
		operationName string
		interval      time.Duration
		fn            MiddlewareHandlerFunc
		middlewares   []MiddlewareFunc
	}
	// Runners is a collection of workers.
	Runners []IRunner
)
// applyMiddleware wraps h with each middleware in slice order; the last
// middleware in the slice becomes the outermost wrapper. The runner is passed
// through so each middleware can see which worker it decorates.
func applyMiddleware(runner IRunner, h MiddlewareHandlerFunc, middleware ...MiddlewareFunc) MiddlewareHandlerFunc {
	wrapped := h
	for _, mw := range middleware {
		wrapped = mw(wrapped, runner)
	}
	return wrapped
}
/*
===============
Runner
===============
*/
// NewRunner builds a Runner that executes fn once per interval, with the
// given middlewares applied around each execution (see applyMiddleware).
func NewRunner(operationName string, interval time.Duration, fn func(ctx context.Context) error, middlewares ...MiddlewareFunc) *Runner {
	return &Runner{
		operationName: operationName,
		interval:      interval,
		fn:            fn,
		middlewares:   middlewares,
	}
}
// OperationName returns the name identifying this worker's operation.
func (r Runner) OperationName() string {
	return r.operationName
}
// Serve starts the worker loop in a new goroutine: fn runs once immediately,
// then once per interval tick, until a value is received on sig.
//
// NOTE(review): each Serve call consumes values from sig; if multiple runners
// share one signal channel, a single signal stops only one of them — confirm
// that callers fan the signal out as intended.
func (r Runner) Serve(sig chan os.Signal, logger leafLogger.Logger) {
	go func() {
		r.run(logger)
		// Use an explicit Ticker instead of time.Tick so the underlying
		// ticker is released when the loop exits (time.Tick can never be
		// stopped; staticcheck SA1015).
		ticker := time.NewTicker(r.interval)
		defer ticker.Stop()
		for {
			select {
			case <-ticker.C:
				r.run(logger)
			case <-sig:
				return
			}
		}
	}()
}
// run executes the worker function once, wrapped in the configured
// middlewares, and logs (without propagating) any error it returns.
func (r Runner) run(logger leafLogger.Logger) {
	ctx := context.Background()
	// Build the middleware chain into a local. The original assigned the
	// result back to r.fn, but r is a value receiver, so that mutation never
	// persisted between calls; the chain is (and was) rebuilt on every run.
	// Keeping it local makes that explicit and avoids mutating the receiver.
	handler := applyMiddleware(r, r.fn, r.middlewares...)
	if err := handler(ctx); err != nil {
		logger.StandardLogger().Warnf("[WORKER-SERVER] error on worker: %s", err.Error())
	}
}
// Add appends runner to the collection.
func (r *Runners) Add(runner IRunner) {
	*r = append(*r, runner)
}
|
npocmaka/Windows-Server-2003 | admin/snapin/rolemgr/adminmanageraz.cpp | //+---------------------------------------------------------------------------
//
// Microsoft Windows
// Copyright (C) Microsoft Corporation, 2000 - 2001.
//
// File: AdminManagerAz.cpp
//
// Contents: Implements CAdminManagerAz. CAdminManagerAz is thin wrapper
// for IAzAuthorizationStore interface.
//
// History: 09-08-2001 Hiteshr Created
//
//----------------------------------------------------------------------------
#include "headers.h"
|
hchenc/go-harbor | model_tag.go | /*
* Harbor API
*
* These APIs provide services for manipulating Harbor project.
*
* API version: 2.0
* Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git)
*/
package swagger
import (
"time"
)
// Tag represents an artifact tag as returned by the Harbor v2 API.
// All fields are optional in the wire format (omitempty).
type Tag struct {
	// The ID of the tag
	Id int64 `json:"id,omitempty"`
	// The ID of the repository that the tag belongs to
	RepositoryId int64 `json:"repository_id,omitempty"`
	// The ID of the artifact that the tag attached to
	ArtifactId int64 `json:"artifact_id,omitempty"`
	// The name of the tag
	Name string `json:"name,omitempty"`
	// The push time of the tag
	PushTime time.Time `json:"push_time,omitempty"`
	// The latest pull time of the tag
	PullTime time.Time `json:"pull_time,omitempty"`
	// The immutable status of the tag
	Immutable bool `json:"immutable,omitempty"`
	// The attribute indicates whether the tag is signed or not
	Signed bool `json:"signed,omitempty"`
}
|
turp1twin/cql-query-engine | src/lib/fhir/questionnaireAnswers.js | // Copyright (c) 2014 The MITRE Corporation
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of HL7 nor the names of its contributors may be used to
// endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
import * as DT from '../cql/cqlDatatypes'
import { Identifier, DomainResource, Reference, BackboneElement, Quantity, Attachment, Coding } from './core'
/**
Embedded class
@class QuestionAnswerComponent
@exports QuestionAnswerComponent as QuestionAnswerComponent
*/
class QuestionAnswerComponent extends BackboneElement {
  /**
  Single-valued answer to the question.
  @returns {boolean|undefined} the raw JSON value
  */
  valueBoolean () { return this.json['valueBoolean'] }
  /**
  Single-valued answer to the question.
  @returns {Number|undefined} the raw JSON value
  */
  valueDecimal () { return this.json['valueDecimal'] }
  /**
  Single-valued answer to the question.
  @returns {Number|undefined} the raw JSON value
  */
  valueInteger () { return this.json['valueInteger'] }
  /**
  Single-valued answer to the question.
  @returns {DT.DateTime|undefined}
  */
  valueDate () { if (this.json['valueDate']) { return DT.DateTime.parse(this.json['valueDate']) } }
  /**
  Single-valued answer to the question.
  @returns {DT.DateTime|undefined}
  */
  valueDateTime () { if (this.json['valueDateTime']) { return DT.DateTime.parse(this.json['valueDateTime']) } }
  /**
  Single-valued answer to the question.
  @returns {DT.DateTime|undefined}
  */
  valueInstant () { if (this.json['valueInstant']) { return DT.DateTime.parse(this.json['valueInstant']) } }
  /**
  Single-valued answer to the question.
  NOTE(review): `time` is neither imported nor defined in this module, so
  calling this accessor when valueTime is present will throw a
  ReferenceError. Presumably a DT time type was intended — confirm the
  correct class and add the import.
  @returns {time|undefined}
  */
  valueTime () { if (this.json['valueTime']) { return new time(this.json['valueTime']) } }
  /**
  Single-valued answer to the question.
  @returns {String|undefined} the raw JSON value
  */
  valueString () { return this.json['valueString'] }
  /**
  Single-valued answer to the question.
  @returns {Attachment|undefined}
  */
  valueAttachment () { if (this.json['valueAttachment']) { return new Attachment(this.json['valueAttachment']) } }
  /**
  Single-valued answer to the question.
  @returns {Coding|undefined}
  */
  valueCoding () { if (this.json['valueCoding']) { return new Coding(this.json['valueCoding']) } }
  /**
  Single-valued answer to the question.
  @returns {Quantity|undefined}
  */
  valueQuantity () { if (this.json['valueQuantity']) { return new Quantity(this.json['valueQuantity']) } }
  /**
  Single-valued answer to the question.
  @returns {Reference|undefined}
  */
  valueReference () { if (this.json['valueReference']) { return new Reference(this.json['valueReference']) } }
}
/**
Embedded class
@class QuestionComponent
@exports QuestionComponent as QuestionComponent
*/
class QuestionComponent extends BackboneElement {
  /**
  Identifies the question from the Questionnaire that corresponds to this question in the QuestionnaireAnswers resource.
  @returns {String|undefined} the raw JSON value (a single string per FHIR, not an array)
  */
  linkId () { return this.json['linkId'] }
  /**
  Text of the question as it is shown to the user.
  @returns {String|undefined} the raw JSON value
  */
  text () { return this.json['text'] }
  /**
  The respondent's answer(s) to the question.
  @returns {Array|undefined} an array of {@link QuestionAnswerComponent} objects
  */
  answer () {
    if (this.json['answer']) {
      return this.json['answer'].map(item => new QuestionAnswerComponent(item))
    }
  }
  /**
  Nested group, containing nested question for this question. The order of groups within the question is relevant.
  @returns {Array|undefined} an array of {@link GroupComponent} objects
  */
  group () {
    if (this.json['group']) {
      return this.json['group'].map(item => new GroupComponent(item))
    }
  }
}
/**
Embedded class
@class GroupComponent
@exports GroupComponent as GroupComponent
*/
class GroupComponent extends BackboneElement {
  /**
  Identifier linking this group back to the corresponding group in the source Questionnaire.
  @returns {String|undefined} the raw JSON value
  */
  linkId () { return this.json.linkId }

  /**
  Text that is displayed above the contents of the group.
  @returns {String|undefined} the raw JSON value
  */
  title () { return this.json.title }

  /**
  Additional text for the group, used for display purposes.
  @returns {String|undefined} the raw JSON value
  */
  text () { return this.json.text }

  /**
  More specific subject this section's answers are about, refining the subject given in QuestionnaireAnswers.
  @returns {Reference|undefined}
  */
  subject () {
    const ref = this.json.subject
    return ref ? new Reference(ref) : undefined
  }

  /**
  Sub-groups within this group; their ordering is relevant.
  @returns {Array|undefined} an array of {@link GroupComponent} objects
  */
  group () {
    const items = this.json.group
    if (!items) return
    return items.map(g => new GroupComponent(g))
  }

  /**
  Questions within this group; their ordering is relevant.
  @returns {Array|undefined} an array of {@link QuestionComponent} objects
  */
  question () {
    const items = this.json.question
    if (!items) return
    return items.map(q => new QuestionComponent(q))
  }
}
/**
A structured set of questions and their answers. The questions are ordered and grouped into coherent subsets, corresponding to the structure of the grouping of the underlying questions.
@class QuestionnaireAnswers
@exports QuestionnaireAnswers as QuestionnaireAnswers
*/
export class QuestionnaireAnswers extends DomainResource {
  /**
  A business identifier assigned to a particular completed (or partially completed) questionnaire.
  @returns {Identifier|undefined}
  */
  identifier () { if (this.json['identifier']) { return new Identifier(this.json['identifier']) } }
  /**
  Indicates the Questionnaire resource that defines the form for which answers are being provided.
  @returns {Reference|undefined}
  */
  questionnaire () { if (this.json['questionnaire']) { return new Reference(this.json['questionnaire']) } }
  /**
  The lifecycle status of the questionnaire answers as a whole.
  @returns {String|undefined} the raw JSON status code (a single string per FHIR, not an array)
  */
  status () { return this.json['status'] }
  /**
  The subject of the questionnaire answers. This could be a patient, organization, practitioner, device, etc. This is who/what the answers apply to, but is not necessarily the source of information.
  @returns {Reference|undefined}
  */
  subject () { if (this.json['subject']) { return new Reference(this.json['subject']) } }
  /**
  Person who received the answers to the questions in the QuestionnaireAnswers and recorded them in the system.
  @returns {Reference|undefined}
  */
  author () { if (this.json['author']) { return new Reference(this.json['author']) } }
  /**
  The date and/or time that this version of the questionnaire answers was authored.
  @returns {DT.DateTime|undefined}
  */
  authored () { if (this.json['authored']) { return DT.DateTime.parse(this.json['authored']) } }
  /**
  The person who answered the questions about the subject. Only used when this is not the subject him/herself.
  @returns {Reference|undefined}
  */
  source () { if (this.json['source']) { return new Reference(this.json['source']) } }
  /**
  Encounter during which this set of questionnaire answers were collected. When there were multiple encounters, this is the one considered most relevant to the context of the answers.
  @returns {Reference|undefined}
  */
  encounter () { if (this.json['encounter']) { return new Reference(this.json['encounter']) } }
  /**
  A group of questions to a possibly similarly grouped set of questions in the questionnaire answers.
  @returns {GroupComponent|undefined}
  */
  group () { if (this.json['group']) { return new GroupComponent(this.json['group']) } }
}
|
Sajaki/intellij-community | platform/lang-impl/testSources/com/intellij/execution/actions/ConfigurationContextTest.java | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.actions;
import com.intellij.execution.Location;
import com.intellij.execution.PsiLocation;
import com.intellij.execution.RunManager;
import com.intellij.execution.RunnerAndConfigurationSettings;
import com.intellij.execution.configurations.ConfigurationFactory;
import com.intellij.execution.impl.FakeConfigurationFactory;
import com.intellij.execution.impl.FakeRunConfiguration;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.extensions.ExtensionPoint;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.PsiElement;
import com.intellij.testFramework.MapDataContext;
import com.intellij.testFramework.fixtures.BasePlatformTestCase;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import java.util.List;
import java.util.Objects;
/**
 * Tests for {@link ConfigurationContext}: verifies that run configurations
 * produced from a data context can be matched against configurations already
 * stored in the {@link RunManager}, and that producer preference ordering
 * ({@code isPreferredConfiguration}) decides which existing configuration wins.
 */
public class ConfigurationContextTest extends BasePlatformTestCase {
  public void testBasicExistingConfigurations() {
    myFixture.configureByText(FileTypes.PLAIN_TEXT, "qq<caret>q");
    ConfigurationContext context = ConfigurationContext.getFromContext(createDataContext());
    // No producer is registered yet, so nothing can match.
    Assert.assertNull(context.findExisting());
    Disposable disposable = Disposer.newDisposable();
    RunConfigurationProducer.EP_NAME.getPoint(null).registerExtension(new FakeRunConfigurationProducer(""), disposable);
    List<RunnerAndConfigurationSettings> configs = getConfigurationsFromContext();
    Assert.assertEquals(1, configs.size());
    for (RunnerAndConfigurationSettings config : configs) {
      addConfiguration(config);
    }
    // With the producer registered and the configuration stored, the context
    // must resolve it as an existing configuration.
    context = ConfigurationContext.getFromContext(createDataContext());
    RunnerAndConfigurationSettings existing = Objects.requireNonNull(context.findExisting());
    Assert.assertTrue(existing.getConfiguration() instanceof FakeRunConfiguration);
    // Disposing unregisters the producer; the stored configuration should no
    // longer be matchable.
    Disposer.dispose(disposable);
    context = ConfigurationContext.getFromContext(createDataContext());
    Assert.assertNull(context.findExisting());
  }
  public void testPreferredExistingConfiguration() {
    myFixture.configureByText(FileTypes.PLAIN_TEXT, "hello,<caret>world");
    @SuppressWarnings("rawtypes")
    ExtensionPoint<RunConfigurationProducer> ep = RunConfigurationProducer.EP_NAME.getPoint(null);
    ep.registerExtension(new FakeRunConfigurationProducer("hello_"), getTestRootDisposable());
    ep.registerExtension(new FakeRunConfigurationProducer("world_"), getTestRootDisposable());
    // Create both configurations without any preference ordering in effect.
    FakeRunConfigurationProducer.SORTING = SortingMode.NONE;
    List<RunnerAndConfigurationSettings> configs = getConfigurationsFromContext();
    Assert.assertEquals(2, configs.size());
    for (RunnerAndConfigurationSettings config : configs) {
      addConfiguration(config);
    }
    // Ascending name order: the "hello_" configuration is preferred.
    FakeRunConfigurationProducer.SORTING = SortingMode.NAME_ASC;
    ConfigurationContext context = ConfigurationContext.getFromContext(createDataContext());
    RunnerAndConfigurationSettings existing = Objects.requireNonNull(context.findExisting());
    Assert.assertTrue(existing.getConfiguration().getName().startsWith("hello_"));
    // Descending name order: the "world_" configuration is preferred.
    FakeRunConfigurationProducer.SORTING = SortingMode.NAME_DESC;
    context = ConfigurationContext.getFromContext(createDataContext());
    existing = Objects.requireNonNull(context.findExisting());
    Assert.assertTrue(existing.getConfiguration().getName().startsWith("world_"));
  }
  /** Builds a DataContext pointing at the PSI element under the caret of the current fixture. */
  private @NotNull DataContext createDataContext() {
    MapDataContext dataContext = new MapDataContext();
    dataContext.put(CommonDataKeys.PROJECT, getProject());
    int offset = myFixture.getEditor().getCaretModel().getOffset();
    PsiElement element = Objects.requireNonNull(myFixture.getFile().findElementAt(offset));
    dataContext.put(Location.DATA_KEY, PsiLocation.fromPsiElement(element));
    return dataContext;
  }
  /** Stores the configuration in the RunManager and removes it again on test teardown. */
  private void addConfiguration(@NotNull RunnerAndConfigurationSettings configuration) {
    Assert.assertTrue(configuration.getConfiguration() instanceof FakeRunConfiguration);
    final RunManager runManager = RunManager.getInstance(getProject());
    runManager.addConfiguration(configuration);
    Disposer.register(getTestRootDisposable(), new Disposable() {
      @Override
      public void dispose() {
        runManager.removeConfiguration(configuration);
      }
    });
  }
  /** Collects the configurations that all registered producers create for the caret context. */
  @NotNull
  private List<RunnerAndConfigurationSettings> getConfigurationsFromContext() {
    DataContext dataContext = createDataContext();
    List<ConfigurationFromContext> list = PreferredProducerFind.getConfigurationsFromContext(
      dataContext.getData(Location.DATA_KEY),
      ConfigurationContext.getFromContext(dataContext),
      false
    );
    return ContainerUtil.map(list, ConfigurationFromContext::getConfigurationSettings);
  }
  /**
   * Producer stub that prefixes configuration names and whose preference
   * ordering is controlled globally through the static {@link #SORTING} flag.
   */
  private static class FakeRunConfigurationProducer extends LazyRunConfigurationProducer<FakeRunConfiguration> {
    private static SortingMode SORTING = SortingMode.NONE;
    private final String myNamePrefix;
    FakeRunConfigurationProducer(@NotNull String namePrefix) {
      myNamePrefix = namePrefix;
    }
    @Override
    protected boolean setupConfigurationFromContext(@NotNull FakeRunConfiguration configuration,
                                                    @NotNull ConfigurationContext context,
                                                    @NotNull Ref<PsiElement> sourceElement) {
      configuration.setName(myNamePrefix + configuration.getName());
      sourceElement.set(context.getPsiLocation());
      return true;
    }
    @Override
    public boolean isConfigurationFromContext(@NotNull FakeRunConfiguration configuration, @NotNull ConfigurationContext context) {
      // A configuration "belongs" to this producer iff it carries the prefix.
      return configuration.getName().startsWith(myNamePrefix);
    }
    @Override
    public boolean isPreferredConfiguration(ConfigurationFromContext self, ConfigurationFromContext other) {
      FakeRunConfiguration selfConfig = ObjectUtils.tryCast(self.getConfiguration(), FakeRunConfiguration.class);
      FakeRunConfiguration otherConfig = ObjectUtils.tryCast(other.getConfiguration(), FakeRunConfiguration.class);
      if (selfConfig == null || otherConfig == null) {
        return false;
      }
      if (SORTING == SortingMode.NAME_ASC) {
        return selfConfig.getName().compareTo(otherConfig.getName()) < 0;
      }
      if (SORTING == SortingMode.NAME_DESC) {
        return selfConfig.getName().compareTo(otherConfig.getName()) > 0;
      }
      return false;
    }
    @NotNull
    @Override
    public ConfigurationFactory getConfigurationFactory() {
      return FakeConfigurationFactory.INSTANCE;
    }
  }
  private enum SortingMode { NAME_ASC, NAME_DESC, NONE }
}
|
bumswerner/new-fsi | test/system/notificationtypes_test.rb | require "application_system_test_case"
# Generated system-test scaffold for Notificationtype pages. The example test
# below is left commented out (Rails generator default) until the index view
# is actually exercised.
class NotificationtypesTest < ApplicationSystemTestCase
  # test "visiting the index" do
  #   visit notificationtypes_url
  #
  #   assert_selector "h1", text: "Notificationtype"
  # end
end
|
dropbox/DropboxBusinessScripts | Groups/ListGroupFolderPermissions.py | import urllib2
import json
import argparse
import sys
import csv
reload(sys)
sys.setdefaultencoding('UTF8')
parser = argparse.ArgumentParser(description='Lists all folders and folder permissions for groups in a DB or DE team.')
parser.add_argument('-g', '--group', dest='groups', action='append', help='Target group name to scan. All groups will '
'be scanned be returned if unspecified. You '
'may pass multiple -g arguments.')
args = parser.parse_args()
dfbToken = raw_input('Enter your Dropbox Business API App token (Team Member File Access permission): ')
# Get all DfB Groups
# Get all DfB Groups
def get_groups():
    """Return the list of group dicts for the team via the v1 groups/list API.

    Uses the module-level ``dfbToken`` for authorization. On HTTP failure
    (typically a bad OAuth token) exits the script via ``parser.error``.
    NOTE: Python 2 code (urllib2, except-comma syntax).
    """
    request = urllib2.Request('https://api.dropbox.com/1/team/groups/list', json.dumps({}))
    request.add_header("Authorization", "Bearer "+dfbToken)
    request.add_header("Content-type", 'application/json')
    try:
        response = json.loads(urllib2.urlopen(request).read())
        return response["groups"]
    # Exit on error here. Probably bad OAuth token. Show DfB response.
    except urllib2.HTTPError, error:
        parser.error(error.read())
# Return member id of the first member that belongs to the specified group
# Return member id of the first member that belongs to the specified group
def get_first_group_member(group_id):
    """Return the ``member_id`` of the first member of ``group_id``.

    Calls the v1 groups/get_info API with the module-level ``dfbToken``.
    Assumes the group has at least one member (callers filter on
    ``num_members > 0``); an empty group would raise IndexError.
    Exits via ``parser.error`` on HTTP failure.
    """
    data = {"group_ids": [group_id]}
    request = urllib2.Request('https://api.dropbox.com/1/team/groups/get_info', json.dumps(data))
    request.add_header("Authorization", "Bearer "+dfbToken)
    request.add_header("Content-type", 'application/json')
    try:
        response = json.loads(urllib2.urlopen(request).read())
        return response["groups"][0]["members"][0]["profile"]["member_id"]
    except urllib2.HTTPError, error:
        parser.error(error.read())
# Find all folders in a particular group (by searching for a member & including unmounted folders)
# will also print groups that the first user found of the anchor group is a part of and flag them as checked
def get_group_folders(csv_writer, checked, anchor_group_id):
    """Write CSV rows for every shared folder visible to ``anchor_group_id``.

    Lists shared folders (including unmounted ones) as seen by the first
    member of the anchor group. Because that member may belong to several
    groups, folders for any other not-yet-checked group encountered along the
    way are emitted too, and ``checked`` is mutated in place to mark those
    groups as done so later iterations skip them.

    :param csv_writer: csv.writer receiving one row per (group, folder) pair
    :param checked: dict mapping group_id -> bool, updated in place
    :param anchor_group_id: group whose first member's view is queried
    """
    checking = {anchor_group_id: []}
    try:
        request = urllib2.Request('https://api.dropbox.com/1/shared_folders'
                                  '?include_membership=true&show_unmounted=true')
        request.add_header("Authorization", "Bearer "+dfbToken)
        # Impersonate a member of the anchor group to see its folders.
        request.add_header("X-Dropbox-Perform-As-Team-Member", get_first_group_member(anchor_group_id))
        response_string = urllib2.urlopen(request).read()
        response = json.loads(response_string)
        # for all groups that each shared folder has access to, add line to print to each group's folders array
        for folder in response:
            # for each group that has access to a folder
            for folder_group in folder['groups']:
                folder_group_id = folder_group['group']['id']
                # verify that the group is in the list of currently active and inhabited groups
                # and the group hasn't already been checked/printed out
                if folder_group_id in checked and checked[folder_group_id] is False:
                    # if this group passes those but isn't already being tracked for this user,
                    # add it to our checking list
                    if folder_group_id not in checking:
                        checking[folder_group_id] = []
                    # log the folder in the list of folders this group has access to
                    checking[folder_group_id].append([
                        folder_group['group']['display_name'].encode("utf-8"),
                        folder_group['access_type'],
                        folder['shared_folder_id'],
                        folder["owner"]["display_name"],
                        folder['shared_folder_name'].encode("utf-8")
                    ])
    except urllib2.HTTPError as error:
        sys.stderr.write(" ERROR: {}\n".format(error))
    # flip the checked flag to true and print out folders by group
    for g in checking:
        checked[g] = True
        for f in checking[g]:
            csv_writer.writerow(f)
# Main script: emit a CSV of (group, access, folder) rows for the requested
# (or all) groups of the team.
csv_writer = csv.writer(sys.stdout)
csv_writer.writerow(['Group Name', 'Group Access', 'Shared Folder Id', 'Shared Owner', 'Shared Folder Name'])

# find dfb groups
groups = get_groups()

# create a dictionary flagging if a group was checked from a previous group's first member
checkedGroups = dict()

# validate user entry of groups (if applicable) - either add just the specified groups as checking, else add all groups
if args.groups is not None:
    groupNames = dict()
    for group in groups:
        groupNames[group['group_name']] = group['group_id']
    for group in args.groups:
        if group not in groupNames:
            parser.error("Group " + group + " does not exist")
        else:
            # BUG FIX: 'group' here is the name string supplied via -g, not a
            # group dict; the original indexed it with ['group_id'], which
            # raises TypeError. Look the id up in groupNames instead.
            checkedGroups[groupNames[group]] = False
else:
    for group in groups:
        checkedGroups[group['group_id']] = False

# print folders for each group, so long as they have members and haven't been checked yet
for group in groups:
    if (args.groups is None or group["group_name"] in args.groups) and \
            group["num_members"] > 0 and checkedGroups[group["group_id"]] is False:
        get_group_folders(csv_writer, checkedGroups, group["group_id"])
|
LinkinW92/htd | skeqi-common/src/main/java/com/skeqi/common/core/domain/entity/SysMenu.java | package com.skeqi.common.core.domain.entity;
import com.baomidou.mybatisplus.annotation.*;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Size;
import java.io.Serializable;
import java.util.*;
/**
 * Menu permission entity, mapped to table sys_menu.
 *
 * @author skeqi
 */
@Data
@NoArgsConstructor
@Accessors(chain = true)
@TableName("sys_menu")
public class SysMenu implements Serializable
{
    private static final long serialVersionUID = 1L;

    /** Menu ID (primary key, auto-increment) */
    @TableId(value = "menu_id",type = IdType.AUTO)
    private Long menuId;

    /** Menu name */
    @NotBlank(message = "菜单名称不能为空")
    @Size(min = 0, max = 50, message = "菜单名称长度不能超过50个字符")
    private String menuName;

    /** Parent menu name (not a table column) */
    @TableField(exist = false)
    private String parentName;

    /** Parent menu ID */
    private Long parentId;

    /** Display order */
    @NotBlank(message = "显示顺序不能为空")
    private String orderNum;

    /** Route path */
    @Size(min = 0, max = 200, message = "路由地址不能超过200个字符")
    private String path;

    /** Component path */
    // NOTE(review): the constraint allows at most 200 characters but the
    // validation message says 255 — confirm which limit is intended.
    @Size(min = 0, max = 200, message = "组件路径不能超过255个字符")
    private String component;

    /** Whether this is an external link (0 = yes, 1 = no) */
    private String isFrame;

    /** Whether the route is cached (0 = cache, 1 = do not cache) */
    private String isCache;

    /** Menu type (M = directory, C = menu, F = button) */
    @NotBlank(message = "菜单类型不能为空")
    private String menuType;

    /** Display visibility (0 = visible, 1 = hidden) */
    private String visible;

    /** Menu status (0 = visible, 1 = hidden) */
    private String status;

    /** Permission identifier string */
    @Size(min = 0, max = 100, message = "权限标识长度不能超过100个字符")
    private String perms;

    /** Menu icon */
    private String icon;

    /** Creator (filled automatically on insert) */
    @TableField(fill = FieldFill.INSERT)
    private String createBy;

    /** Creation time (filled automatically on insert) */
    @TableField(fill = FieldFill.INSERT)
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date createTime;

    /** Last updater (filled automatically on insert/update) */
    @TableField(fill = FieldFill.INSERT_UPDATE)
    private String updateBy;

    /** Last update time (filled automatically on insert/update) */
    @TableField(fill = FieldFill.INSERT_UPDATE)
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date updateTime;

    /** Remark */
    private String remark;

    /** Request parameters (not a table column) */
    @TableField(exist = false)
    private Map<String, Object> params = new HashMap<>();

    /** Child menus (not a table column) */
    @TableField(exist = false)
    private List<SysMenu> children = new ArrayList<SysMenu>();
}
|
LaudateCorpus1/weblogic-image-tool | tests/src/test/java/com/oracle/weblogic/imagetool/tests/ITImagetool.java | // Copyright (c) 2020, 2021, Oracle and/or its affiliates.
// Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
package com.oracle.weblogic.imagetool.tests;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import com.oracle.weblogic.imagetool.cli.menu.KubernetesTarget;
import com.oracle.weblogic.imagetool.logging.LoggingFacade;
import com.oracle.weblogic.imagetool.logging.LoggingFactory;
import com.oracle.weblogic.imagetool.tests.annotations.IntegrationTest;
import com.oracle.weblogic.imagetool.tests.annotations.Logger;
import com.oracle.weblogic.imagetool.tests.utils.CacheCommand;
import com.oracle.weblogic.imagetool.tests.utils.CommandResult;
import com.oracle.weblogic.imagetool.tests.utils.CreateAuxCommand;
import com.oracle.weblogic.imagetool.tests.utils.CreateCommand;
import com.oracle.weblogic.imagetool.tests.utils.RebaseCommand;
import com.oracle.weblogic.imagetool.tests.utils.Runner;
import com.oracle.weblogic.imagetool.tests.utils.UpdateCommand;
import com.oracle.weblogic.imagetool.util.Utils;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.TestMethodOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
@IntegrationTest
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
class ITImagetool {
@Logger
private static final LoggingFacade logger = LoggingFactory.getLogger(ITImagetool.class);
// STAGING_DIR - directory where JDK and other installers are pre-staged before testing
private static final String STAGING_DIR = System.getProperty("STAGING_DIR");
// working directories for the Image Tool, supplied as system properties by the Maven build
private static final String wlsImgBldDir = System.getProperty("WLSIMG_BLDDIR");
private static final String wlsImgCacheDir = System.getProperty("WLSIMG_CACHEDIR");
// Docker images
private static final String DB_IMAGE = System.getProperty("DB_IMAGE");
// Staging Dir files
private static final String JDK_INSTALLER = "jdk-8u202-linux-x64.tar.gz";
private static final String JDK_INSTALLER_NEWER = "jdk-8u231-linux-x64.tar.gz";
private static final String WLS_INSTALLER = "fmw_12.2.1.3.0_wls_Disk1_1of1.zip";
private static final String P27342434_INSTALLER = "p27342434_122130_Generic.zip";
private static final String P28186730_INSTALLER = "p28186730_139422_Generic.zip";
private static final String WDT_INSTALLER = "weblogic-deploy.zip";
private static final String FMW_INSTALLER = "fmw_12.2.1.3.0_infrastructure_Disk1_1of1.zip";
// cache key used by the add/delete cache-entry tests
private static final String TEST_ENTRY_KEY = "mytestEntryKey";
// patch numbers used by the patching tests (P28186730 is the OPatch update)
private static final String P27342434_ID = "27342434";
private static final String P28186730_ID = "28186730";
// versions that the tests cache installers/patches under
private static final String WLS_VERSION = "12.2.1.3.0";
private static final String OPATCH_VERSION = "13.9.4.2.2";
private static final String JDK_VERSION = "8u202";
private static final String JDK_VERSION_8u212 = "8u212";
private static final String WDT_VERSION = "1.1.2";
// WDT artifacts: archive built by build-archive.sh, plus models/variables from test resources
private static final Path WDT_ARCHIVE = Paths.get("target", "wdt", "archive.zip");
private static final Path WDT_RESOURCES = Paths.get("src", "test", "resources", "wdt");
private static final Path WDT_VARIABLES = WDT_RESOURCES.resolve("domain.properties");
private static final Path WDT_MODEL = WDT_RESOURCES.resolve("simple-topology.yaml");
private static final String WDT_MODEL1 = "simple-topology1.yaml";
private static final Path WDT_MODEL2 = WDT_RESOURCES.resolve("simple-topology2.yaml");
// mutable test-run state, initialized in validateEnvironmentSettings()/staticPrepare()
private static String dbContainerName = "";
private static String build_tag = "";
private static String oracleSupportUsername;
// flags used by assumeTrue() so dependent tests skip when a prerequisite image failed to build
private static boolean wlsImgBuilt = false;
private static boolean domainImgBuilt = false;
/**
 * Verify that all required system properties are set, then derive the build tag
 * and the DB container name used throughout the test run.
 *
 * @throws IllegalArgumentException if any required system property is missing
 */
private static void validateEnvironmentSettings() {
    logger.info("Initializing the tests ...");
    List<String> missingSettings = new ArrayList<>();
    if (Utils.isEmptyString(wlsImgBldDir)) {
        missingSettings.add("WLSIMG_BLDDIR");
    }
    if (Utils.isEmptyString(wlsImgCacheDir)) {
        missingSettings.add("WLSIMG_CACHEDIR");
    }
    if (Utils.isEmptyString(STAGING_DIR)) {
        missingSettings.add("STAGING_DIR");
    }
    if (Utils.isEmptyString(DB_IMAGE)) {
        missingSettings.add("DB_IMAGE");
    }
    if (!missingSettings.isEmpty()) {
        String error = String.join(", ", missingSettings)
            + " must be set as a system property in the pom.xml";
        throw new IllegalArgumentException(error);
    }
    // get the build tag from Jenkins build environment variable BUILD_TAG;
    // Docker tags must be lowercase, so normalize it. Fall back to a fixed tag
    // when running outside of Jenkins.
    build_tag = System.getenv("BUILD_TAG");
    if (build_tag != null) {
        build_tag = build_tag.toLowerCase();
    } else {
        build_tag = "imagetool-itest";
    }
    dbContainerName = "InfraDB4" + build_tag;
    logger.info("build_tag = " + build_tag);
    logger.info("WLSIMG_BLDDIR = " + wlsImgBldDir);
    logger.info("WLSIMG_CACHEDIR = " + wlsImgCacheDir);
    logger.info("STAGING_DIR = " + STAGING_DIR);
    logger.info("DB_IMAGE = " + DB_IMAGE);
}
/**
 * Verify that every required installer file exists in the staging directory.
 *
 * @param installers file names expected to be present under STAGING_DIR
 * @throws IllegalStateException listing every missing installer, if any
 */
private static void verifyStagedFiles(String... installers) {
    // determine if any of the required installers are missing from the stage directory
    List<String> missingInstallers = new ArrayList<>();
    for (String installer : installers) {
        Path installFile = Paths.get(STAGING_DIR, installer);
        if (!Files.exists(installFile)) {
            missingInstallers.add(installer);
        }
    }
    if (!missingInstallers.isEmpty()) {
        String error = "Could not find these installers in the staging directory: " + STAGING_DIR + "\n ";
        error += String.join("\n ", missingInstallers);
        throw new IllegalStateException(error);
    }
}
private static void executeNoVerify(String command) throws Exception {
logger.info("executing command: " + command);
Runner.run(command);
}
/**
 * Poll a command until its output contains the string "healthy", or fail after
 * a fixed number of retries. Used to wait for the Oracle DB container to be ready.
 *
 * @param cmd the command to run on each polling iteration
 * @throws RuntimeException if the expected string never appears within the retry budget
 * @throws Exception if running the command fails
 */
private static void checkCmdInLoop(String cmd) throws Exception {
    final int maxIterations = 50;
    final int waitTime = 5;
    final String matchStr = "healthy";
    for (int i = 0; i < maxIterations; i++) {
        CommandResult result = Runner.run(cmd);
        // container might not have been created yet, or not yet healthy; success when
        // the command exits cleanly AND its output contains the match string
        if (result.exitValue() == 0 && result.stdout().contains(matchStr)) {
            logger.info("get the expected String " + matchStr);
            return;
        }
        // give up on the last iteration
        if (i == maxIterations - 1) {
            throw new RuntimeException(
                "FAILURE: " + cmd + " does not return the expected string " + matchStr + ", exiting!");
        }
        logger.info("Waiting for the expected String {0}: Iter [{1}/{2}], sleeping {3} seconds more",
            matchStr, i, maxIterations, waitTime);
        Thread.sleep(waitTime * 1000L);
    }
}
/**
 * Best-effort removal of test artifacts: the DB container and any Docker images
 * tagged with this run's build tag.
 *
 * @throws Exception if a cleanup command cannot be started
 */
private static void cleanup() throws Exception {
    logger.info("cleaning up the test environment ...");
    // remove the database container (and its volumes), if present
    executeNoVerify("docker rm -f -v " + dbContainerName);
    // remove every image created under this run's build tag
    executeNoVerify("docker rmi -f $(docker images -q '" + build_tag + "' | uniq)");
}
/**
 * One-time setup before all tests: validate environment settings, clean up
 * leftovers from previous runs, create the build directory, verify staged
 * installers, check Oracle Support credentials, and build the WDT archive.
 *
 * @throws Exception if any setup step fails
 */
@BeforeAll
static void staticPrepare() throws Exception {
    logger.info("prepare for image tool test ...");
    // verify that all the prerequisites are set and exist
    validateEnvironmentSettings();
    // clean up Docker instances leftover from a previous run
    cleanup();
    logger.info("Setting up the test environment ...");
    File buildDir = new File(wlsImgBldDir);
    if (!buildDir.exists()) {
        logger.info(wlsImgBldDir + " does not exist, creating it");
        // mkdirs (rather than mkdir) also creates any missing parent directories
        if (!buildDir.mkdirs()) {
            throw new IllegalStateException("Unable to create build directory " + wlsImgBldDir);
        }
    }
    // verify that required files/installers are available
    verifyStagedFiles(JDK_INSTALLER, WLS_INSTALLER, WDT_INSTALLER, P27342434_INSTALLER, P28186730_INSTALLER,
        FMW_INSTALLER, JDK_INSTALLER_NEWER);
    // get Oracle support credentials used by the patch-download tests
    oracleSupportUsername = System.getenv("ORACLE_SUPPORT_USERNAME");
    String oracleSupportPassword = System.getenv("ORACLE_SUPPORT_PASSWORD");
    if (oracleSupportUsername == null || oracleSupportPassword == null) {
        throw new Exception("Please set environment variables ORACLE_SUPPORT_USERNAME and ORACLE_SUPPORT_PASSWORD"
            + " for Oracle Support credentials to download the patches.");
    }
    logger.info("Building WDT archive ...");
    Path scriptPath = Paths.get("src", "test", "resources", "wdt", "build-archive.sh");
    String command = "sh " + scriptPath;
    // executeAndVerify already asserts a zero exit value; the explicit check below is
    // kept as a defensive guard that also logs the script output on failure
    CommandResult result = executeAndVerify(command);
    if (result.exitValue() != 0) {
        logger.severe(result.stdout());
        throw new IOException("Failed to build WDT Archive");
    }
}
/**
 * One-time teardown after all tests: remove the DB container and test images.
 */
@AfterAll
static void staticUnprepare() throws Exception {
    logger.info("cleaning up after the test ...");
    cleanup();
}
/**
 * Create the log directory in ./target (build folder), and open a new file using the test method's name.
 *
 * @param testInfo metadata from the test to be logged
 * @return an output file wrapped in a PrintWriter (auto-flush enabled)
 * @throws IOException if the writer fails to open the file
 * @throws IllegalArgumentException if called outside of a test method context
 */
private static PrintWriter getTestMethodWriter(TestInfo testInfo) throws IOException {
    if (!testInfo.getTestMethod().isPresent()) {
        throw new IllegalArgumentException("Method is not present in this context, and this method cannot be used");
    }
    String methodName = testInfo.getTestMethod().get().getName();
    // create an output file in the build folder with the name {test method name}.out
    Path outputPath = Paths.get("target", "logs", methodName + ".out");
    Files.createDirectories(outputPath.getParent());
    logger.info("Test log: {0}", outputPath.toString());
    // Files.newBufferedWriter writes UTF-8, unlike the previous
    // FileOutputStream/OutputStreamWriter chain which used the platform default charset
    return new PrintWriter(Files.newBufferedWriter(outputPath), true);
}
/**
 * Return the simple name of the currently executing test method.
 *
 * @param testInfo metadata injected by JUnit for the current test
 * @return the test method name
 * @throws IllegalArgumentException when no test method is in scope
 */
private static String getMethodName(TestInfo testInfo) {
    return testInfo.getTestMethod()
        .map(m -> m.getName())
        .orElseThrow(() -> new IllegalArgumentException("Cannot call getMethodName outside of test method"));
}
/**
 * Run a command, assert that it exited with status zero, and log its output.
 *
 * @param command the command line to execute
 * @return the result of running the command
 * @throws Exception if the process cannot be started
 */
private static CommandResult executeAndVerify(String command) throws Exception {
    logger.info("Executing command: " + command);
    CommandResult cmdResult = Runner.run(command);
    assertEquals(0, cmdResult.exitValue(), "for command: " + command);
    logger.info(cmdResult.stdout());
    return cmdResult;
}
/**
 * Determine if a Docker image exists on the local system.
 *
 * @param imageTag the tag to look for
 * @return true when "docker images -q" reports a non-empty image ID
 */
private static boolean imageExists(String imageTag) throws IOException, InterruptedException {
    String imageId = getImageId(imageTag);
    return !imageId.isEmpty();
}
/**
 * Get the docker identifier for this image tag.
 *
 * @param imageTag the tag to query
 * @return the image ID, or an empty string when the image does not exist
 */
private static String getImageId(String imageTag) throws IOException, InterruptedException {
    CommandResult queryResult = Runner.run("docker images -q " + imageTag);
    return queryResult.stdout().trim();
}
/**
 * Assert that a file inside a Docker image contains the expected text by
 * cat-ing the file in a throwaway container.
 *
 * @param imagename image to run
 * @param filename path of the file inside the image
 * @param expectedContent text that must appear in the file
 * @throws Exception when the expected content is not found
 */
private void verifyFileInImage(String imagename, String filename, String expectedContent) throws Exception {
    logger.info("verifying the file content in image");
    String catCmd = "docker run --rm " + imagename + " sh -c 'cat " + filename + "'";
    logger.info("executing command: " + catCmd);
    CommandResult catResult = Runner.run(catCmd);
    if (!catResult.stdout().contains(expectedContent)) {
        throw new Exception("The image " + imagename + " does not have the expected file content: "
            + expectedContent);
    }
}
/**
 * Start an Oracle DB Docker container (used for RCU in the JRF tests) and
 * block until it reports a healthy status.
 *
 * @throws Exception if the container cannot be started or never becomes healthy
 */
private void createDBContainer() throws Exception {
    logger.info("Creating an Oracle db docker container ...");
    // remove any stale container with the same name first (best effort)
    String command = "docker rm -f " + dbContainerName;
    Runner.run(command);
    command = "docker run -d --name " + dbContainerName + " --env=\"DB_PDB=InfraPDB1\""
        + " --env=\"DB_DOMAIN=us.oracle.com\" --env=\"DB_BUNDLE=basic\" " + DB_IMAGE;
    logger.info("executing command: " + command);
    Runner.run(command);
    // wait for the db is ready: poll docker ps until the status shows "healthy"
    command = "docker ps | grep " + dbContainerName;
    checkCmdInLoop(command);
}
/**
 * Verify that a JDK installer can be added to, and listed from, the Image Tool cache.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(1)
@Tag("gate")
@Tag("cache")
@DisplayName("Add JDK installer to cache")
void cacheAddInstallerJdk(TestInfo testInfo) throws Exception {
    Path installerPath = Paths.get(STAGING_DIR, JDK_INSTALLER);
    String addCmd = new CacheCommand()
        .addInstaller(true)
        .type("jdk")
        .version(JDK_VERSION)
        .path(installerPath)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        // addInstaller must exit with status 0
        CommandResult addResult = Runner.run(addCmd, out, logger);
        assertEquals(0, addResult.exitValue(), "for command: " + addCmd);
        // listItems must exit with status 0 and show the newly added JDK installer
        String listCmd = new CacheCommand().listItems(true).build();
        CommandResult listResult = Runner.run(listCmd, out, logger);
        assertEquals(0, listResult.exitValue(), "for command: " + listCmd);
        assertTrue(listResult.stdout().contains("jdk_" + JDK_VERSION + "=" + installerPath));
    }
}
/**
 * Verify that a WLS installer can be added to, and listed from, the Image Tool cache.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(2)
@Tag("gate")
@Tag("cache")
@DisplayName("Add WLS installer to cache")
void cacheAddInstallerWls(TestInfo testInfo) throws Exception {
    Path installerPath = Paths.get(STAGING_DIR, WLS_INSTALLER);
    String addCmd = new CacheCommand()
        .addInstaller(true)
        .type("wls")
        .version(WLS_VERSION)
        .path(installerPath)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        // addInstaller must exit with status 0
        CommandResult addResult = Runner.run(addCmd, out, logger);
        assertEquals(0, addResult.exitValue(), "for command: " + addCmd);
        // listItems must exit with status 0 and show the newly added WLS installer
        String listCmd = new CacheCommand().listItems(true).build();
        CommandResult listResult = Runner.run(listCmd, out, logger);
        assertEquals(0, listResult.exitValue(), "for command: " + listCmd);
        assertTrue(listResult.stdout().contains("wls_" + WLS_VERSION + "=" + installerPath));
    }
}
/**
 * Verify that a manually staged patch JAR can be added to, and listed from, the cache.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(3)
@Tag("gate")
@Tag("cache")
@DisplayName("Add patch 27342434 to cache")
void cacheAddPatch(TestInfo testInfo) throws Exception {
    Path stagedPatch = Paths.get(STAGING_DIR, P27342434_INSTALLER);
    String addCmd = new CacheCommand()
        .addPatch(true)
        .path(stagedPatch)
        .patchId(P27342434_ID, WLS_VERSION)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        // addPatch must exit with status 0
        CommandResult addResult = Runner.run(addCmd, out, logger);
        assertEquals(0, addResult.exitValue(), "for command: " + addCmd);
        // listItems must exit with status 0 and show the newly added patch
        String listCmd = new CacheCommand().listItems(true).build();
        CommandResult listResult = Runner.run(listCmd, out, logger);
        assertEquals(0, listResult.exitValue(), "for command: " + listCmd);
        assertTrue(listResult.stdout().contains(P27342434_ID + "_" + WLS_VERSION + "=" + stagedPatch));
    }
}
/**
 * Verify that an arbitrary key/value entry can be added to the cache, and that
 * entries from earlier tests persist across invocations.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(4)
@Tag("gate")
@DisplayName("Add manual entry to cache")
void cacheAddTestEntry(TestInfo testInfo) throws Exception {
    Path entryValue = Paths.get(STAGING_DIR, P27342434_INSTALLER);
    String addEntryCmd = new CacheCommand()
        .addEntry(true)
        .key(TEST_ENTRY_KEY)
        .value(entryValue)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult addEntryResult = Runner.run(addEntryCmd, out, logger);
        assertEquals(0, addEntryResult.exitValue(), "for command: " + addEntryCmd);
        // listItems must exit with status 0
        String listCmd = new CacheCommand().listItems(true).build();
        CommandResult listResult = Runner.run(listCmd, out, logger);
        assertEquals(0, listResult.exitValue(), "for command: " + listCmd);
        // the new entry must be listed (keys are stored lowercased)
        assertTrue(listResult.stdout().contains(TEST_ENTRY_KEY.toLowerCase() + "=" + entryValue));
        // cache should also contain the installer that was added in the previous test (persistent cache)
        assertTrue(listResult.stdout().contains(P27342434_ID + "_" + WLS_VERSION + "="));
    }
}
/**
 * Verify that a cache entry can be deleted and no longer appears in the listing.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(5)
@Tag("gate")
@DisplayName("Delete cache entry")
void cacheDeleteTestEntry(TestInfo testInfo) throws Exception {
    String deleteCmd = new CacheCommand()
        .deleteEntry(true)
        .key(TEST_ENTRY_KEY)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult deleteResult = Runner.run(deleteCmd, out, logger);
        assertEquals(0, deleteResult.exitValue(), "for command: " + deleteCmd);
        // listItems must exit with status 0
        String listCmd = new CacheCommand().listItems(true).build();
        CommandResult listResult = Runner.run(listCmd, out, logger);
        assertEquals(0, listResult.exitValue(), "for command: " + listCmd);
        // the deleted entry must no longer appear in the listing
        assertFalse(listResult.stdout().contains(TEST_ENTRY_KEY.toLowerCase()));
    }
}
/**
 * Verify that the OPatch update JAR can be added to, and listed from, the cache.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(6)
@Tag("gate")
@Tag("cache")
@DisplayName("Add OPatch patch to cache")
void cacheOpatch(TestInfo testInfo) throws Exception {
    Path opatchPath = Paths.get(STAGING_DIR, P28186730_INSTALLER);
    String addCmd = new CacheCommand()
        .addPatch(true)
        .path(opatchPath)
        .patchId(P28186730_ID, OPATCH_VERSION)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        // addPatch must exit with status 0
        CommandResult addResult = Runner.run(addCmd, out, logger);
        assertEquals(0, addResult.exitValue(), "for command: " + addCmd);
        // listItems must exit with status 0 and show the newly added patch
        String listCmd = new CacheCommand().listItems(true).build();
        CommandResult listResult = Runner.run(listCmd, out, logger);
        assertEquals(0, listResult.exitValue(), "for command: " + listCmd);
        assertTrue(listResult.stdout().contains(P28186730_ID + "_" + OPATCH_VERSION + "=" + opatchPath));
    }
}
/**
 * Add the WDT installer to the cache for use by later WDT-based tests.
 *
 * @throws IOException if getting a file to write the command output fails
 * @throws InterruptedException if running the Java command fails
 */
@Test
@Order(7)
@Tag("gate")
@Tag("cache")
@DisplayName("Add WDT installer to cache")
void cacheAddInstallerWdt(TestInfo testInfo) throws IOException, InterruptedException {
    Path wdtInstallerPath = Paths.get(STAGING_DIR, WDT_INSTALLER);
    String addCmd = new CacheCommand()
        .addInstaller(true)
        .type("wdt")
        .version(WDT_VERSION)
        .path(wdtInstallerPath)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        // addInstaller must exit with status 0
        CommandResult addResult = Runner.run(addCmd, out, logger);
        assertEquals(0, addResult.exitValue(), "for command: " + addCmd);
    }
}
/**
 * Build a WebLogic Server image using the default installer versions, and
 * record success so dependent tests can run.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(10)
@Tag("gate")
@DisplayName("Create default WebLogic Server image")
void createWlsImg(TestInfo testInfo) throws Exception {
    String imageTag = build_tag + ":" + getMethodName(testInfo);
    String createCmd = new CreateCommand().tag(imageTag).build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult createResult = Runner.run(createCmd, out, logger);
        assertEquals(0, createResult.exitValue(), "for command: " + createCmd);
        // the new image must exist locally
        assertTrue(imageExists(imageTag), "Image was not created: " + imageTag);
        // flag consumed by assumeTrue() in dependent tests
        wlsImgBuilt = true;
    }
}
/**
 * Apply patch 27342434 to the image produced by createWlsImg.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(11)
@Tag("gate")
@DisplayName("Update createWlsImg with patch 27342434")
void updateWlsImg(TestInfo testInfo) throws Exception {
    // skip when the base image from createWlsImg was never built
    assumeTrue(wlsImgBuilt);
    String imageTag = build_tag + ":" + getMethodName(testInfo);
    String updateCmd = new UpdateCommand()
        .fromImage(build_tag + ":createWlsImg")
        .tag(imageTag)
        .patches(P27342434_ID)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult updateResult = Runner.run(updateCmd, out, logger);
        assertEquals(0, updateResult.exitValue(), "for command: " + updateCmd);
        // the updated image must exist locally
        assertTrue(imageExists(imageTag), "Image was not created: " + imageTag);
        // TODO should check that patch and OPatch were applied
    }
}
/**
 * Build a WLS image that includes a WDT-created domain, and record success
 * so dependent tests can run.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(12)
@Tag("gate")
@DisplayName("Create WLS image with WDT domain")
void createWlsImgUsingWdt(TestInfo testInfo) throws Exception {
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        String imageTag = build_tag + ":" + getMethodName(testInfo);
        // create a WLS image with a domain built by WDT
        String createCmd = new CreateCommand()
            .tag(imageTag)
            .patches(P27342434_ID)
            .wdtVersion(WDT_VERSION)
            .wdtModel(WDT_MODEL)
            .wdtArchive(WDT_ARCHIVE)
            .wdtDomainHome("/u01/domains/simple_domain")
            .wdtVariables(WDT_VARIABLES)
            .build();
        CommandResult createResult = Runner.run(createCmd, out, logger);
        assertEquals(0, createResult.exitValue(), "for command: " + createCmd);
        // the new image must exist locally
        assertTrue(imageExists(imageTag), "Image was not created: " + imageTag);
        // flag consumed by assumeTrue() in dependent tests
        domainImgBuilt = true;
    }
}
/**
 * Use the Rebase function to move the WDT domain from createWlsImgUsingWdt
 * onto the patched image from updateWlsImg.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(13)
@Tag("gate")
@DisplayName("Rebase the WLS domain")
void rebaseWlsImg(TestInfo testInfo) throws Exception {
    // both prerequisite images must have been built
    assumeTrue(wlsImgBuilt);
    assumeTrue(domainImgBuilt);
    String imageTag = build_tag + ":" + getMethodName(testInfo);
    String rebaseCmd = new RebaseCommand()
        .sourceImage(build_tag, "createWlsImgUsingWdt")
        .targetImage(build_tag, "updateWlsImg")
        .tag(imageTag)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult rebaseResult = Runner.run(rebaseCmd, out, logger);
        assertEquals(0, rebaseResult.exitValue(), "for command: " + rebaseCmd);
        // the rebased image must exist locally
        assertTrue(imageExists(imageTag), "Image was not created: " + imageTag);
    }
}
/**
 * Create a Model-in-Image (model-only) WLS image on an Oracle Linux 8-slim base,
 * then verify ownership/permissions of the WDT and domain directories.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(14)
@Tag("gate")
@DisplayName("Create Model in Image with OL 8-slim")
void createMiiOl8slim(TestInfo testInfo) throws Exception {
    // test assumes that WDT installer is already in the cache from previous test
    // test assumes that the WLS 12.2.1.3.0 installer is already in the cache
    // test assumes that the default JDK version 8u202 is already in the cache
    Path tmpWdtModel = Paths.get(wlsImgBldDir, WDT_MODEL1);
    // copy the wdt model file into the build dir (overwriting any stale copy)
    Files.copy(WDT_RESOURCES.resolve(WDT_MODEL1), tmpWdtModel, StandardCopyOption.REPLACE_EXISTING);
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        String tagName = build_tag + ":" + getMethodName(testInfo);
        String command = new CreateCommand()
            .tag(tagName)
            .fromImage("ghcr.io/oracle/oraclelinux", "8-slim")
            .version(WLS_VERSION)
            .wdtVersion(WDT_VERSION)
            .wdtArchive(WDT_ARCHIVE)
            .wdtModel(tmpWdtModel)
            .wdtModelOnly(true)
            .type("wls")
            .build();
        CommandResult result = Runner.run(command, out, logger);
        assertEquals(0, result.exitValue(), "for command: " + command);
        // verify the docker image is created
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
        // verify permissions of the directories and scripts laid down by the build
        verifyFilePermissions("/u01/domains", "drwxr-xr-x", tagName, out);
        verifyFilePermissions("/u01/wdt", "drwxr-xr-x", tagName, out);
        verifyFilePermissions("/u01/wdt/models", "drwxr-xr-x", tagName, out);
        verifyFilePermissions("/u01/wdt/weblogic-deploy", "drwxr-x---", tagName, out);
        verifyFilePermissions("/u01/oracle", "drwxr-xr-x", tagName, out);
        verifyFilePermissions("/u01/wdt/weblogic-deploy/bin/createDomain.sh", "-rwxr-x---", tagName, out);
        verifyFilePermissions("/u01/wdt/weblogic-deploy/bin/validateModel.sh", "-rwxr-x---", tagName, out);
    }
}
/**
 * Create an auxiliary image containing only the WDT model, archive, and WDT
 * binaries (no Oracle Home), and verify the staged files and their permissions.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(15)
@Tag("gate")
@DisplayName("Create Aux Image")
void createAuxImage(TestInfo testInfo) throws Exception {
    String tagName = build_tag + ":" + getMethodName(testInfo);
    String command = new CreateAuxCommand()
        .tag(tagName)
        .wdtModel(WDT_MODEL)
        .wdtArchive(WDT_ARCHIVE)
        .wdtVersion(WDT_VERSION)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult result = Runner.run(command, out, logger);
        assertEquals(0, result.exitValue(), "for command: " + command);
        // verify the docker image is created
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
        // verify the model file was copied into the image with the expected content
        verifyFileInImage(tagName, "/auxiliary/models/simple-topology.yaml", "AdminUserName: weblogic");
        // verify permissions of the staged archive and WDT script
        // (a duplicate archive.zip check was removed here)
        verifyFilePermissions("/auxiliary/models/archive.zip", "-rw-r-----", tagName, out);
        verifyFilePermissions("/auxiliary/weblogic-deploy/bin/createDomain.sh", "-rwxr-x---", tagName, out);
    }
}
/**
 * Create a FMW image with internet access to download PSU.
 * Oracle Support credentials must be provided to download the patches.
 * Uses different JDK version from the default in the Image Tool.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(20)
@Tag("nightly")
@DisplayName("Create FMW 12.2.1.3 image with latest PSU")
void createFmwImgFullInternetAccess(TestInfo testInfo) throws Exception {
    // add jdk 8u212 installer to the cache
    String addNewJdkCmd = new CacheCommand().addInstaller(true)
        .type("jdk")
        .version(JDK_VERSION_8u212)
        .path(Paths.get(STAGING_DIR, JDK_INSTALLER_NEWER))
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult addNewJdkResult = Runner.run(addNewJdkCmd, out, logger);
        // the process return code for addInstaller should be 0
        assertEquals(0, addNewJdkResult.exitValue(), "for command: " + addNewJdkCmd);
        // add fmw installer to the cache
        String addCommand = new CacheCommand()
            .addInstaller(true)
            .type("fmw")
            .version(WLS_VERSION)
            .path(Paths.get(STAGING_DIR, FMW_INSTALLER))
            .build();
        CommandResult addResult = Runner.run(addCommand, out, logger);
        // the process return code for addInstaller should be 0
        assertEquals(0, addResult.exitValue(), "for command: " + addCommand);
        String tagName = build_tag + ":" + getMethodName(testInfo);
        // create an image with FMW and the latest PSU using ARU to download the patch
        String command = new CreateCommand()
            .tag(tagName)
            .jdkVersion(JDK_VERSION_8u212)
            .type("fmw")
            .user(oracleSupportUsername)
            // name of the env var holding the Oracle Support password (validated in staticPrepare)
            .passwordEnv("ORACLE_SUPPORT_PASSWORD")
            .latestPsu(true)
            .build();
        CommandResult result = Runner.run(command, out, logger);
        assertEquals(0, result.exitValue(), "for command: " + command);
        // verify the docker image is created
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
    }
}
/**
 * Create a JRF domain image using WDT, running RCU against a local Oracle DB container.
 * You need to have OCR credentials to pull container-registry.oracle.com/database/enterprise:12.2.0.1-slim
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(22)
@Tag("nightly")
@DisplayName("Create FMW 12.2.1.3.0 image with WDT domain")
void createJrfDomainImgUsingWdt(TestInfo testInfo) throws Exception {
    // create a db container for RCU
    createDBContainer();
    // test assumes that WDT installer is already in the cache from previous test
    // test assumes that the FMW 12.2.1.3.0 installer is already in the cache
    // test assumes that the default JDK version 8u202 is already in the cache
    Path tmpWdtModel = Paths.get(wlsImgBldDir, WDT_MODEL1);
    // copy the wdt model file into the build dir so it can be edited
    Files.copy(WDT_RESOURCES.resolve(WDT_MODEL1), tmpWdtModel, StandardCopyOption.REPLACE_EXISTING);
    String getDbContainerIp = "docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "
        + dbContainerName;
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        String host = Runner.run(getDbContainerIp, out, logger).stdout().trim();
        logger.info("Setting WDT Model DB_HOST to {0}", host);
        // substitute the DB container's IP into the model's %DB_HOST% placeholder
        String content = new String(Files.readAllBytes(tmpWdtModel));
        content = content.replaceAll("%DB_HOST%", host);
        Files.write(tmpWdtModel, content.getBytes());
        String tagName = build_tag + ":" + getMethodName(testInfo);
        String command = new CreateCommand()
            .tag(tagName)
            .version(WLS_VERSION)
            .wdtVersion(WDT_VERSION)
            .wdtArchive(WDT_ARCHIVE)
            .wdtDomainHome("/u01/domains/simple_domain")
            .wdtModel(tmpWdtModel)
            .wdtDomainType("JRF")
            .wdtRunRcu(true)
            .type("fmw")
            .build();
        CommandResult result = Runner.run(command, out, logger);
        assertEquals(0, result.exitValue(), "for command: " + command);
        // verify the docker image is created
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
    }
}
/**
 * Create a RestrictedJRF domain image using WDT, applying the latest PSU on top
 * of the FMW installer.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(23)
@Tag("nightly")
@DisplayName("Create FMW image with WDT domain and latestPSU with new base img")
void createRestrictedJrfDomainImgUsingWdt(TestInfo testInfo) throws Exception {
    // test assumes that the FMW 12.2.1.3.0 installer is already in the cache
    // test assumes that the default JDK version 8u202 is already in the cache
    String tagName = build_tag + ":" + getMethodName(testInfo);
    String command = new CreateCommand()
        .tag(tagName)
        .version(WLS_VERSION)
        .latestPsu(true)
        .user(oracleSupportUsername)
        // name of the env var holding the Oracle Support password (validated in staticPrepare)
        .passwordEnv("ORACLE_SUPPORT_PASSWORD")
        .wdtVersion(WDT_VERSION)
        .wdtModel(WDT_MODEL)
        .wdtArchive(WDT_ARCHIVE)
        .wdtVariables(WDT_VARIABLES)
        .wdtDomainHome("/u01/domains/simple_domain")
        .wdtDomainType("RestrictedJRF")
        .type("fmw")
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult result = Runner.run(command, out, logger);
        assertEquals(0, result.exitValue(), "for command: " + command);
        // verify the docker image is created
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
    }
}
/**
 * Build a WLS image whose domain is created from two WDT model files.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(24)
@Tag("nightly")
@DisplayName("Create WLS image with WDT and multiple models")
void createWlsImgUsingMultiModels(TestInfo testInfo) throws Exception {
    // test assumes that the WLS 12.2.1.3 installer is already in the cache
    // test assumes that the default JDK installer is already in the cache
    // test assumes that the WDT installer is already in the cache
    String imageTag = build_tag + ":" + getMethodName(testInfo);
    String createCmd = new CreateCommand()
        .tag(imageTag)
        .version(WLS_VERSION)
        .wdtVersion(WDT_VERSION)
        .wdtArchive(WDT_ARCHIVE)
        .wdtDomainHome("/u01/domains/simple_domain")
        .wdtModel(WDT_MODEL, WDT_MODEL2)
        .wdtVariables(WDT_VARIABLES)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult createResult = Runner.run(createCmd, out, logger);
        assertEquals(0, createResult.exitValue(), "for command: " + createCmd);
        // the new image must exist locally
        assertTrue(imageExists(imageTag), "Image was not created: " + imageTag);
    }
}
/**
 * Create a WLS image with additional build commands and recommended patches, then
 * verify the files and image label produced by each additionalBuildCommands section.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(25)
@Tag("nightly")
@DisplayName("Create image with additionalBuildCommands and recommendedPatches")
void createWlsImgWithAdditionalBuildCommands(TestInfo testInfo) throws Exception {
    String tagName = build_tag + ":" + getMethodName(testInfo);
    String command = new CreateCommand()
        .jdkVersion(JDK_VERSION)
        .tag(tagName)
        .recommendedPatches(true)
        .user(oracleSupportUsername)
        // name of the env var holding the Oracle Support password (validated in staticPrepare)
        .passwordEnv("ORACLE_SUPPORT_PASSWORD")
        .additionalBuildCommands(WDT_RESOURCES.resolve("multi-sections.txt"))
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult result = Runner.run(command, out, logger);
        assertEquals(0, result.exitValue(), "for command: " + command);
        // verify the docker image is created
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
    }
    // verify the file created in [before-jdk-install] section
    verifyFileInImage(tagName, "/u01/jdk/beforeJDKInstall.txt", "before-jdk-install");
    // verify the file created in [after-jdk-install] section
    verifyFileInImage(tagName, "/u01/jdk/afterJDKInstall.txt", "after-jdk-install");
    // verify the file created in [before-fmw-install] section
    verifyFileInImage(tagName, "/u01/oracle/beforeFMWInstall.txt", "before-fmw-install");
    // verify the file created in [after-fmw-install] section
    verifyFileInImage(tagName, "/u01/oracle/afterFMWInstall.txt", "after-fmw-install");
    // verify the label is created as in [final-build-commands] section
    CommandResult inspect = Runner.run("docker inspect --format '{{ index .Config.Labels}}' " + tagName);
    assertTrue(inspect.stdout().contains("final-build-commands:finalBuildCommands"),
        tagName + " does not contain the expected label");
}
/**
 * Create a WLS image using Java Server JRE image as a base.
 * This tests that the JAVA_HOME is correctly identified and applied in the CREATE.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(26)
@Tag("nightly")
@DisplayName("Create image with WLS using Java ServerJRE")
void createImageWithServerJRE(TestInfo testInfo) throws Exception {
    String imageTag = build_tag + ":" + getMethodName(testInfo);
    String createCmd = new CreateCommand()
        .tag(imageTag)
        .fromImage("container-registry.oracle.com/java/serverjre:8")
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult createResult = Runner.run(createCmd, out, logger);
        assertEquals(0, createResult.exitValue(), "for command: " + createCmd);
        // the new image must exist locally
        assertTrue(imageExists(imageTag), "Image was not created: " + imageTag);
    }
}
/**
 * Update the WLS image created in the previous test.
 * This tests that the JAVA_HOME is correctly identified and applied in the UPDATE.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(27)
@Tag("nightly")
@DisplayName("Update image with WLS using Java ServerJRE")
void updateImageWithServerJRE(TestInfo testInfo) throws Exception {
    String imageTag = build_tag + ":" + getMethodName(testInfo);
    String updateCmd = new UpdateCommand()
        .fromImage(build_tag + ":createImageWithServerJRE")
        .tag(imageTag)
        .patches(P27342434_ID)
        .build();
    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult updateResult = Runner.run(updateCmd, out, logger);
        assertEquals(0, updateResult.exitValue(), "for command: " + updateCmd);
        // the updated image must exist locally
        assertTrue(imageExists(imageTag), "Image was not created: " + imageTag);
    }
}
/**
 * Verify file permissions for a specified path on the given image.
 * @param path Filename or Directory to check for permissions value.
 * @param expected Expected permission string, such as "drwxrwxr-x"
 * @param tagName Tag name or image ID of the image to inspect
 * @param out The printwriter where the docker run command will send stdout/stderr
 * @throws IOException if process start fails
 * @throws InterruptedException if the wait is interrupted before the process completes
 */
private void verifyFilePermissions(String path, String expected, String tagName, PrintWriter out)
    throws IOException, InterruptedException {
    String command = String.format("docker run --rm -t %s ls -ld %s", tagName, path);
    String actual = Runner.run(command, out, logger).stdout().trim();
    String[] tokens = actual.split(" ", 2);
    assertEquals(2, tokens.length, "Unable to get file permissions for " + path);
    // When running on an SELinux host, the permissions shown by ls will end with a ".",
    // so compare with startsWith; this also avoids a StringIndexOutOfBoundsException
    // when the permissions token is shorter than the expected string.
    assertTrue(tokens[0].startsWith(expected),
        "Incorrect file permissions for " + path + ": expected " + expected + " but was " + tokens[0]);
}
/**
 * update a WLS image with a model.
 * Applies a WDT model, variables, and archive to the previously built WLS image.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(28)
@Tag("nightly")
@DisplayName("Use Update to add a WDT model to createWlsImg")
void updateAddModel(TestInfo testInfo) throws Exception {
    assumeTrue(wlsImgBuilt);
    String tagName = build_tag + ":" + getMethodName(testInfo);

    // Assemble the update command in model-only mode.
    UpdateCommand updateCommand = new UpdateCommand()
        .fromImage(build_tag + ":createWlsImg") //from step 10, createWlsImg()
        .tag(tagName)
        .wdtVersion(WDT_VERSION)
        .wdtModel(WDT_MODEL)
        .wdtVariables(WDT_VARIABLES)
        .wdtArchive(WDT_ARCHIVE)
        .wdtModelOnly(true);
    String command = updateCommand.build();

    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult result = Runner.run(command, out, logger);
        // Command must succeed ...
        assertEquals(0, result.exitValue(), "for command: " + command);
        // ... the image must exist ...
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
        // ... and the WDT scripts must carry the expected permissions.
        verifyFilePermissions("/u01/wdt/weblogic-deploy/bin/createDomain.sh", "-rwxr-x---", tagName, out);
        verifyFilePermissions("/u01/wdt/weblogic-deploy/bin/validateModel.sh", "-rwxr-x---", tagName, out);
    }
}
/**
 * update a WLS image with another model.
 * Layers a second WDT model on top of the image built by updateAddModel().
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(29)
@Tag("nightly")
@DisplayName("Use Update to add a second WDT model to createWlsImg")
void updateAddSecondModel(TestInfo testInfo) throws Exception {
    String testFromImage = build_tag + ":updateAddModel";
    // skip this test if updateAddModel() failed to create an image
    assumeTrue(imageExists(testFromImage));

    String tagName = build_tag + ":" + getMethodName(testInfo);
    UpdateCommand updateCommand = new UpdateCommand()
        .fromImage(testFromImage)
        .tag(tagName)
        .wdtVersion(WDT_VERSION)
        .wdtModel(WDT_MODEL2)
        .wdtModelOnly(true);
    String command = updateCommand.build();

    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult result = Runner.run(command, out, logger);
        // The update must exit cleanly and produce the tagged image.
        assertEquals(0, result.exitValue(), "for command: " + command);
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
    }
}
/**
 * create WLS image with OpenShift settings.
 * Builds a WLS image targeting OpenShift and checks the group permissions
 * applied to the generated domain directory.
 *
 * @throws Exception - if any error occurs
 */
@Test
@Order(30)
@Tag("nightly")
@DisplayName("Create image with OpenShift settings")
void createWlsImgWithOpenShiftSettings(TestInfo testInfo) throws Exception {
    String tagName = build_tag + ":" + getMethodName(testInfo);

    // Create command targeting OpenShift; two models plus variables and archive.
    CreateCommand createCommand = new CreateCommand()
        .jdkVersion(JDK_VERSION)
        .tag(tagName)
        .wdtVersion(WDT_VERSION)
        .wdtArchive(WDT_ARCHIVE)
        .wdtDomainHome("/u01/domains/simple_domain")
        .wdtModel(WDT_MODEL, WDT_MODEL2)
        .wdtVariables(WDT_VARIABLES)
        .target(KubernetesTarget.OpenShift);
    String command = createCommand.build();

    try (PrintWriter out = getTestMethodWriter(testInfo)) {
        CommandResult result = Runner.run(command, out, logger);
        assertEquals(0, result.exitValue(), "for command: " + command);
        // verify the docker image is created
        assertTrue(imageExists(tagName), "Image was not created: " + tagName);
        // verify the file permissions on the domain directory were set correctly
        verifyFilePermissions("/u01/domains/simple_domain", "drwxrwxr-x", tagName, out);
    }
}
}
|
symcomp/org.symcomp.openmath | src/main/java/org/symcomp/openmath/popcorn/symbols/Transc1.java | <reponame>symcomp/org.symcomp.openmath<filename>src/main/java/org/symcomp/openmath/popcorn/symbols/Transc1.java<gh_stars>1-10
//---------------------------------------------------------------------------
// Copyright 2006-2009
// <NAME>, <EMAIL>, (TU Eindhoven, Netherlands)
// <NAME>, <EMAIL> (University Kassel, Germany)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//---------------------------------------------------------------------------
package org.symcomp.openmath.popcorn.symbols;
import org.symcomp.openmath.SymbolRenderer;
import org.symcomp.openmath.AbstractRenderer;
import java.io.IOException;
import java.io.Writer;
/**
 * Renderer for the symbols of the OpenMath transc1 content dictionary
 * (trigonometric, hyperbolic, exponential and logarithmic functions).
 * Each method writes the symbol's Popcorn name to the output writer.
 */
public class Transc1 extends SymbolRenderer {

    public Transc1(AbstractRenderer renderer) {
        super(renderer);
    }

    /** Writes the given symbol name to the output writer. */
    private void emit(String symbol) throws IOException {
        out.write(symbol);
    }

    public void arccos() throws IOException { emit("arccos"); }
    public void arccosh() throws IOException { emit("arccosh"); }
    public void arccot() throws IOException { emit("arccot"); }
    public void arccoth() throws IOException { emit("arccoth"); }
    public void arccsc() throws IOException { emit("arccsc"); }
    public void arccsch() throws IOException { emit("arccsch"); }
    public void arcsec() throws IOException { emit("arcsec"); }
    public void arcsech() throws IOException { emit("arcsech"); }
    public void arcsin() throws IOException { emit("arcsin"); }
    public void arcsinh() throws IOException { emit("arcsinh"); }
    public void arctan() throws IOException { emit("arctan"); }
    public void arctanh() throws IOException { emit("arctanh"); }
    public void cos() throws IOException { emit("cos"); }
    public void cosh() throws IOException { emit("cosh"); }
    public void cot() throws IOException { emit("cot"); }
    public void coth() throws IOException { emit("coth"); }
    public void csc() throws IOException { emit("csc"); }
    public void csch() throws IOException { emit("csch"); }
    public void exp() throws IOException { emit("exp"); }
    public void ln() throws IOException { emit("ln"); }
    public void log() throws IOException { emit("log"); }
    public void sec() throws IOException { emit("sec"); }
    public void sech() throws IOException { emit("sech"); }
    public void sin() throws IOException { emit("sin"); }
    public void sinh() throws IOException { emit("sinh"); }
    public void tan() throws IOException { emit("tan"); }
    public void tanh() throws IOException { emit("tanh"); }
}
|
Groundswell/swell_ecom | app/views/swell_ecom/subscription_plans/show.json.jbuilder |
# JSON representation of a single subscription plan (@plan).
json.title @plan.title
json.avatar @plan.avatar
json.description @plan.description
json.content @plan.content
# price is stored in cents; expose both a formatted currency string and the raw value
json.price number_to_currency( @plan.price / 100.to_f )
json.price_in_cents @plan.price
# intervals are stored as a value plus a unit, rendered as e.g. "1 month"
json.billing_interval @plan.billing_interval_value.to_s + " " + @plan.billing_interval_unit
json.trial_interval @plan.trial_interval_value.to_s + " " + @plan.trial_interval_unit
|
unification-com/mainchain-cosmos | x/enterprise/client/cli/query.go | package cli
import (
"context"
"fmt"
"strconv"
"strings"
"github.com/spf13/cobra"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/version"
entutils "github.com/unification-com/mainchain/x/enterprise/client/utils"
"github.com/unification-com/mainchain/x/enterprise/types"
)
// GetQueryCmd returns the cli query commands for this module
func GetQueryCmd() *cobra.Command {
	queryCmd := &cobra.Command{
		Use:                        types.ModuleName,
		Short:                      "Querying commands for the enterprise module",
		DisableFlagParsing:         true,
		SuggestionsMinimumDistance: 2,
		RunE:                       client.ValidateCmd,
	}

	// Register every enterprise query sub-command on the parent command.
	subCommands := []*cobra.Command{
		GetCmdQueryParams(),
		GetCmdGetPurchaseOrders(),
		GetCmdGetPurchaseOrderByID(),
		GetCmdGetLockedUndByAddress(),
		GetCmdQueryTotalLocked(),
		GetCmdQueryTotalUnlocked(),
		GetCmdGetWhitelistedAddresses(),
		GetCmdGetAddresIsWhitelisted(),
		GetCmdGetEnterpriseUserAccount(),
		GetCmdGetEnterpriseSupply(),
	}
	queryCmd.AddCommand(subCommands...)

	return queryCmd
}
// GetCmdQueryParams implements a command to return the current enterprise FUND
// parameters.
func GetCmdQueryParams() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "params",
		Short: "Query the current enterprise FUND parameters",
		Long: strings.TrimSpace(
			fmt.Sprintf(`Query all the current enterprise FUND parameters.
Example:
$ %s query enterprise params
`,
				version.AppName,
			),
		),
		Args: cobra.NoArgs,
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}

			// Fetch the module parameters over gRPC.
			res, err := types.NewQueryClient(clientCtx).Params(
				context.Background(),
				&types.QueryParamsRequest{},
			)
			if err != nil {
				return err
			}

			// NOTE: rendered via the legacy object printer, unlike the other
			// commands in this file which use PrintProto.
			return clientCtx.PrintObjectLegacy(res)
		},
	}

	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
// GetCmdGetPurchaseOrders queries a paginated list of purchase orders, with
// optional status and purchaser filters.
func GetCmdGetPurchaseOrders() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "orders",
		Short: "Query Enterprise FUND purchase orders with optional filters",
		Long: strings.TrimSpace(
			fmt.Sprintf(`Query for all paginated Enterprise FUND purchase orders that match optional filters:
Example:
$ %s query enterprise orders --status (raised|accept|reject|complete)
$ %s query enterprise orders --purchaser und1chknpc8nf2tmj5582vhlvphnjyekc9ypspx5ay
$ %s query enterprise orders --page=2 --limit=100
`,
				version.AppName, version.AppName, version.AppName,
			),
		),
		RunE: func(cmd *cobra.Command, args []string) error {
			strProposalStatus, _ := cmd.Flags().GetString(FlagPurchaseOrderStatus)
			purchaserAddr, _ := cmd.Flags().GetString(FlagPurchaser)

			// Only parse the status filter when one was supplied, and surface
			// parse errors instead of silently discarding them (previously a
			// mistyped --status value fell through to an unfiltered query).
			status := types.StatusNil
			if len(strProposalStatus) > 0 {
				parsedStatus, err := types.PurchaseOrderStatusFromString(entutils.NormalisePurchaseOrderStatus(strProposalStatus))
				if err != nil {
					return fmt.Errorf("invalid purchase order status %s: %w", strProposalStatus, err)
				}
				status = parsedStatus
			}

			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}
			queryClient := types.NewQueryClient(clientCtx)

			// The pagination flags were previously read without checking the error.
			pageReq, err := client.ReadPageRequest(cmd.Flags())
			if err != nil {
				return err
			}

			params := &types.QueryEnterpriseUndPurchaseOrdersRequest{
				Pagination: pageReq,
			}
			if status != types.StatusNil {
				params.Status = status
			}
			if len(purchaserAddr) > 0 {
				params.Purchaser = purchaserAddr
			}

			res, err := queryClient.EnterpriseUndPurchaseOrders(context.Background(), params)
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}

	cmd.Flags().String(FlagPurchaseOrderStatus, "", "(optional) filter purchase orders by status, status: raised/accept/reject/complete")
	cmd.Flags().String(FlagPurchaser, "", "(optional) filter purchase orders raised by address")
	flags.AddQueryFlagsToCmd(cmd)
	flags.AddPaginationFlagsToCmd(cmd, "purchase orders")
	return cmd
}
// GetCmdGetPurchaseOrderByID queries a purchase order given an ID
func GetCmdGetPurchaseOrderByID() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "order [purchase_order_id]",
		Short: "get a purchase order by ID",
		Args:  cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}

			// The purchase order ID argument must parse as an unsigned integer.
			poID, err := strconv.ParseUint(args[0], 10, 64)
			if err != nil {
				return fmt.Errorf("purchase_order_id %s not a valid int, please input a valid purchase_order_id", args[0])
			}

			req := &types.QueryEnterpriseUndPurchaseOrderRequest{
				PurchaseOrderId: poID,
			}
			res, err := types.NewQueryClient(clientCtx).EnterpriseUndPurchaseOrder(context.Background(), req)
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}

	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
// GetCmdGetLockedUndByAddress queries locked FUND for a given address
func GetCmdGetLockedUndByAddress() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "locked [address]",
		Short: "get locked FUND for an address",
		Args:  cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}

			// The argument must be a valid bech32 account address.
			owner, err := sdk.AccAddressFromBech32(args[0])
			if err != nil {
				return err
			}

			req := &types.QueryLockedUndByAddressRequest{
				Owner: owner.String(),
			}
			res, err := types.NewQueryClient(clientCtx).LockedUndByAddress(context.Background(), req)
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}

	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
// GetCmdQueryTotalLocked implements a command to return the current total
// amount of locked enterprise FUND.
func GetCmdQueryTotalLocked() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "total-locked",
		Short: "Query the current total locked enterprise FUND",
		Args:  cobra.NoArgs,
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}

			res, err := types.NewQueryClient(clientCtx).TotalLocked(context.Background(), &types.QueryTotalLockedRequest{})
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}

	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
// GetCmdQueryTotalUnlocked implements a command to return the current total
// unlocked FUND in circulation. (The previous comment incorrectly said "locked".)
func GetCmdQueryTotalUnlocked() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "total-unlocked",
		Short: "Query the current total unlocked und in circulation",
		Args:  cobra.NoArgs,
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}

			res, err := types.NewQueryClient(clientCtx).TotalUnlocked(context.Background(), &types.QueryTotalUnlockedRequest{})
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}

	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
// GetCmdGetWhitelistedAddresses queries all addresses whitelisted for raising
// enterprise FUND purchase orders
func GetCmdGetWhitelistedAddresses() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "whitelist",
		Short: "get addresses whitelisted for raising enterprise purchase orders",
		Args:  cobra.NoArgs,
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}

			res, err := types.NewQueryClient(clientCtx).Whitelist(context.Background(), &types.QueryWhitelistRequest{})
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}

	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
// GetCmdGetAddresIsWhitelisted queries whether the given address is whitelisted
// for raising enterprise FUND purchase orders.
// (The previous comment was copy-pasted from GetCmdGetLockedUndByAddress; the
// function name keeps its original "Addres" spelling for API compatibility.)
func GetCmdGetAddresIsWhitelisted() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "whitelisted [address]",
		// typo fix: "whitelested" -> "whitelisted"
		Short: "check if given address is whitelisted for purchase orders",
		Args:  cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}
			queryClient := types.NewQueryClient(clientCtx)

			// The argument must be a valid bech32 account address.
			address, err := sdk.AccAddressFromBech32(args[0])
			if err != nil {
				return err
			}

			res, err := queryClient.Whitelisted(context.Background(), &types.QueryWhitelistedRequest{
				Address: address.String(),
			})
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}

	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
// GetCmdGetEnterpriseUserAccount queries enterprise data for a given address:
// locked, unlocked and total FUND.
// (The previous comment was copy-pasted from GetCmdGetLockedUndByAddress.)
func GetCmdGetEnterpriseUserAccount() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "account [address]",
		Short: "get data about an address - locked, unlocked and total FUND",
		Args:  cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}
			queryClient := types.NewQueryClient(clientCtx)
			// The argument must be a valid bech32 account address.
			address, err := sdk.AccAddressFromBech32(args[0])
			if err != nil {
				return err
			}
			res, err := queryClient.EnterpriseAccount(context.Background(), &types.QueryEnterpriseAccountRequest{
				Address: address.String(),
			})
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}
	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
// GetCmdGetEnterpriseSupply queries eFUND data, including locked, unlocked and
// total chain supply
func GetCmdGetEnterpriseSupply() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "ent-supply",
		Short: "get eFUND data, including locked, unlocked and chain total supply",
		Args:  cobra.NoArgs,
		RunE: func(cmd *cobra.Command, args []string) error {
			clientCtx, err := client.GetClientQueryContext(cmd)
			if err != nil {
				return err
			}

			res, err := types.NewQueryClient(clientCtx).EnterpriseSupply(context.Background(), &types.QueryEnterpriseSupplyRequest{})
			if err != nil {
				return err
			}
			return clientCtx.PrintProto(res)
		},
	}

	flags.AddQueryFlagsToCmd(cmd)
	return cmd
}
|
itc7/alipay-sdk-java-all | src/main/java/com/alipay/api/response/AlipayMerchantPayforprivilegeMemberremainingQueryResponse.java | <gh_stars>0
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: alipay.merchant.payforprivilege.memberremaining.query response.
 *
 * @author <NAME>
 * @since 1.0, 2020-04-08 11:35:33
 */
public class AlipayMerchantPayforprivilegeMemberremainingQueryResponse extends AlipayResponse {

    private static final long serialVersionUID = 7781181882464542276L;

    /** Portion of the remaining balance that is unused benefit funds. */
    @ApiField("unused_benefit")
    private String unusedBenefit;

    /** Portion of the remaining balance that is unused principal. */
    @ApiField("unused_principal")
    private String unusedPrincipal;

    public String getUnusedBenefit() {
        return unusedBenefit;
    }

    public void setUnusedBenefit(String unusedBenefit) {
        this.unusedBenefit = unusedBenefit;
    }

    public String getUnusedPrincipal() {
        return unusedPrincipal;
    }

    public void setUnusedPrincipal(String unusedPrincipal) {
        this.unusedPrincipal = unusedPrincipal;
    }
}
|
TheThirdPillar/Identity | identity-web-app/components/WellBeingCard.js | <filename>identity-web-app/components/WellBeingCard.js<gh_stars>0
import Cookies from 'js-cookie'
import Row from 'react-bootstrap/Row'
import Col from 'react-bootstrap/Col'
import Card from 'react-bootstrap/Card'
import CardDeck from 'react-bootstrap/CardDeck'
import Button from 'react-bootstrap/Button'
import ProgressBar from 'react-bootstrap/ProgressBar'
import styles from '../styles/Dashboard.module.css'
import { domain } from '../config/config'
import { FaLock } from 'react-icons/fa'
// Dashboard card summarizing a user's well-being "Productivity Stack":
// an overall score with a validation-request button, plus per-stack progress bars.
// When props.isPublic is true, the score details are hidden behind a lock icon.
export default function WellBeingCard (props) {
  // TODO: Find better way to do this.
  // TODO: Pre-calculate and change calculateScore implementation.
  // Look up each named stack in props.stacks (undefined when absent).
  let physiologyStack = props.stacks?.find(stack => {
    return stack.stackName === 'physiology'
  })
  let energyStack = props.stacks?.find(stack => {
    return stack.stackName === 'energy'
  })
  let feelingStack = props.stacks?.find(stack => {
    return stack.stackName === 'feeling'
  })
  let thinkingStack = props.stacks?.find(stack => {
    return stack.stackName === 'thinking'
  })
  let behaviorStack = props.stacks?.find(stack => {
    return stack.stackName === 'behavior'
  })
  let resultStack = props.stacks?.find(stack => {
    return stack.stackName === 'result'
  })

  // Convert a stack's ratings object into a percentage: sums every rating
  // value, then scales by (sum * 10) / 4. Returns 0 for a missing stack.
  // NOTE(review): the scaling assumes 4 questions rated 0-4 per stack — confirm.
  const calculateScore = (ratings) => {
    if (!ratings) return 0
    let score = 0
    for (var q of Object.keys(ratings)) {
      score = score + Number(ratings[q])
    }
    return ((score * 10) / 4)
  }

  // POST the current stacks to the backend to request third-party validation,
  // then notify the parent component so it can refresh the validation state.
  const requestValidation = () => {
    // TODO: Let user pick community for validation
    // NOTE(review): validator and community are hard-coded placeholders.
    let validator = 'pranag'
    let validatingCommunity = '602bf366af0d03643f769724'
    let requestBody = {}
    requestBody.stacks = props.stacks
    requestBody.validator = validator
    requestBody.validatingCommunity = validatingCommunity
    fetch(domain + '/application/listen/identity/requestStackValidation', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer ' + Cookies.get('token')
      },
      body: JSON.stringify(requestBody)
    })
    .then(response => response.json())
    .then(data => {
      console.log(data)
      if (data.status && data.status == "SUCCESS") {
        props.handleValidationRequest(() => data.updatedIdentity?.wellBeingValidation)
      }
    })
  }

  // TODO: Never validated and validated X days ago for button and
  // TODO: footer text.
  return (
    <Row className="justify-content-center">
      <Col xs={12} md={12} lg={12}>
        <CardDeck className="m-2">
          <Card className="text-center bg-dark text-white">
            <Card.Body>
              <Card.Title>Overall Score</Card.Title>
              <Card.Text className={styles.wellBeingScore + " font-weight-bold mt-4"}>
                {
                  (props.isPublic)
                    ? <FaLock />
                    : <span className={styles.wellBeingScoreTotal}>{Math.ceil(props.score)}/100</span>
                }
              </Card.Text>
              {
                (props.isPublic)
                  ? ""
                  : <Button size="sm" variant="warning" onClick={() => requestValidation()} disabled={props.validation?.validationStatus == "pending"}>
                      {(props.validation?.validationStatus == "pending") ? "Pending Validation"
                        : "Request Validation"}
                    </Button>
              }
            </Card.Body>
            <Card.Footer className="text-muted">
              Last validated: {(props.validation && props.validation.validationDate) ? new Date(props.validation.validationDate) : "Not validated yet"}
            </Card.Footer>
          </Card>
          <Card className="bg-dark text-white p-1">
            <Card.Body>
              {
                (props.isPublic)
                  ?
                  <Card.Text className={styles.wellBeingScore + " font-weight-bold mt-4 text-center"}>
                    <FaLock />
                  </Card.Text>
                  :
                  <Row>
                    <Col xs={6} md={6} lg={6}>
                      <Card.Text className="m-0 mt-2">
                        Physiology
                      </Card.Text>
                      <ProgressBar now={calculateScore(physiologyStack?.stackRatings)} label={calculateScore(physiologyStack?.stackRatings) + "%"} variant="warning" className="mb-1" />
                      <Card.Text className="m-0 mt-2">
                        Emotions
                      </Card.Text>
                      <ProgressBar now={calculateScore(energyStack?.stackRatings)} label={calculateScore(energyStack?.stackRatings) + "%"} variant="warning" className="mb-1" />
                      <Card.Text className="m-0 mt-2">
                        Feeling
                      </Card.Text>
                      <ProgressBar now={calculateScore(feelingStack?.stackRatings)} label={calculateScore(feelingStack?.stackRatings) + "%"} variant="warning" className="mb-1" />
                    </Col>
                    <Col xs={6} md={6} lg={6}>
                      <Card.Text className="m-0 mt-2">
                        Thinking
                      </Card.Text>
                      <ProgressBar now={calculateScore(thinkingStack?.stackRatings)} label={calculateScore(thinkingStack?.stackRatings) + "%"} variant="warning" className="mb-1" />
                      <Card.Text className="m-0 mt-2">
                        Behavior
                      </Card.Text>
                      <ProgressBar now={calculateScore(behaviorStack?.stackRatings)} label={calculateScore(behaviorStack?.stackRatings) + "%"} variant="warning" className="mb-1" />
                      <Card.Text className="m-0 mt-2">
                        Results
                      </Card.Text>
                      <ProgressBar now={calculateScore(resultStack?.stackRatings)} label={calculateScore(resultStack?.stackRatings) + "%"} variant="warning" className="mb-1" />
                    </Col>
                  </Row>
              }
            </Card.Body>
            <Card.Footer className="text-muted text-center">Productivity Stack</Card.Footer>
          </Card>
        </CardDeck>
      </Col>
    </Row>
  )
}
lhiuming/REI | tests/test_d3d11.cpp | // D3D11 tutorial from www.braynzarsoft.net, with modification and additional comments
// Include and link appropriate libraries and headers //
#include <DirectXMath.h> // new math lib from d3d11
#include <d3d11.h>
#include <d3dcompiler.h> // new shader compiler from d3d11; D3DX is deprecated since windows 8
#include <windows.h>
// D3D Interfaces
IDXGISwapChain* SwapChain;
ID3D11Device* d3d11Device;
ID3D11DeviceContext* d3d11DevCon;
ID3D11RenderTargetView* renderTargetView;
ID3D11DepthStencilView* depthSentcilView; // store the depth/stencil view
ID3D11Texture2D* depthStencilBuffer; // 2D texture object to store the depth/stentil buffer
// Rendering objects
ID3D11Buffer* cubeIndexBuffer; // buffuer to hold index (for drawing primitives based on vertec
ID3D11Buffer* cubeVertBuffer;
ID3D11VertexShader* VS;
ID3D11PixelShader* PS;
ID3DBlob* VS_Buffer; // not using ID3D10Blob; new from d3dcompiler
ID3DBlob* PS_Buffer;
ID3D11InputLayout* vertLayout;
ID3D11Buffer* cbPerObjectBuffer; // buffer to store per-object tramsform matrix
ID3D11Buffer* cbPerFrameBuffer; // buffer to hold frame-wide constant data
// Some math data for transform
DirectX::XMMATRIX WVP;
DirectX::XMMATRIX World;
DirectX::XMMATRIX camView;
DirectX::XMMATRIX camProjection;
DirectX::XMVECTOR camPosition;
DirectX::XMVECTOR camTarget;
DirectX::XMVECTOR camUp;
// Some math for object transformation
DirectX::XMMATRIX cube1world;
DirectX::XMMATRIX cube2world;
DirectX::XMMATRIX Roration;
DirectX::XMMATRIX Scale;
DirectX::XMMATRIX TRanslation;
float rot = 0.01f;
// Pipeline stage states object (used to customized some fixed stage)
ID3D11RasterizerState* WireFrame;
ID3D11RasterizerState* SolidRender;
// window management
LPCTSTR WndClassName = "firstwindow";
HWND hwnd = nullptr;
HRESULT hr;
const int Width = 300;
const int Height = 300;
// Function Prototypes //
bool InitializeDirect3d11App(HINSTANCE hInstance);
void CleanUp();
bool InitScene();
void UpdateScene();
void DrawScene();
bool InitializeWindow(HINSTANCE hInstance, int ShowWnd, int width, int height, bool windowed);
int messageloop();
LRESULT CALLBACK WndProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam);
// Light structure used in the cbPerFrame constant buffer.
// Field order must match the HLSL cbuffer packing scheme.
struct Light {
  DirectX::XMFLOAT3 dir; // light direction
  float pad; // padding to match with shader's constant buffer packing scheme
  DirectX::XMFLOAT4 ambient; // ambient color term
  DirectX::XMFLOAT4 diffuse; // diffuse color term
  // Zero-initialize every field on construction.
  Light() { ZeroMemory(this, sizeof(Light)); }
};
// Global frame-wide light (carried inside cbPerFrame).
Light g_light;
// Vertex Structure and Input Data Layout.
// Per-vertex data: position, RGBA color, and normal (for lighting).
struct Vertex {
  Vertex() {}
  Vertex(
      float x, float y, float z, float r, float g, float b, float a, float nx, float ny, float nz)
      : pos(x, y, z), color(r, g, b, a), normal(nx, ny, nz) {}
  DirectX::XMFLOAT3 pos; // DirectXMath use DirectX namespace
  DirectX::XMFLOAT4 color;
  DirectX::XMFLOAT3 normal;
};
// Input layout describing how the Vertex fields map to shader semantics.
// Byte offsets: pos at 0, color at 12 (3 floats), normal at 28 (3+4 floats).
D3D11_INPUT_ELEMENT_DESC layout[]
    = {{
           "POSITION", 0, // a Name and an Index to map elements in the shader
           DXGI_FORMAT_R32G32B32_FLOAT, // enum member of DXGI_FORMAT; define the format of the element
           0, // input slot; kind of a flexible and optional configuration
           0, // byte offset
           D3D11_INPUT_PER_VERTEX_DATA, // ADVANCED, discussed later; about instancing
           0 // ADVANCED; also for instancing
       },
       {"COLOR", 0, DXGI_FORMAT_R32G32B32A32_FLOAT, 0,
           12, // skip the first 3 coordinate data
           D3D11_INPUT_PER_VERTEX_DATA, 0},
       {"NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0,
           28, // skip the fisrt 3 coordinnate and 4 colors ata
           D3D11_INPUT_PER_VERTEX_DATA, 0}};
UINT numElements = ARRAYSIZE(layout); // = 3
// effect constant buffer structure;
// memory layout must match those in the cbuffer struct in Shader
struct cbPerObject {
  DirectX::XMMATRIX WVP;   // combined World * View * Projection matrix
  DirectX::XMMATRIX World; // used for world-space lighting
};
// CPU-side staging copy of the per-object constant buffer.
cbPerObject g_cbPerObj;

// Per-frame constant buffer: frame-wide light data.
struct cbPerFrame {
  Light light;
};
// CPU-side staging copy of the per-frame constant buffer.
cbPerFrame g_cbPerFrm;
// Function Definitions //
// Windows main function: create the window, bring up Direct3D, build the
// scene, then pump messages until quit and release all resources.
int WINAPI WinMain(HINSTANCE hInstance, // program instance
    HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nShowCmd) {
  // Each stage must succeed before the next one runs; on failure we report
  // and bail out immediately.
  if (!InitializeWindow(hInstance, nShowCmd, Width, Height, true)) {
    MessageBox(0, "Window Initialization - Failed", "Error", MB_OK);
    return 0;
  }

  if (!InitializeDirect3d11App(hInstance)) // Initialize Direct3D
  {
    MessageBox(0, "Direct3D Initialization - Failed", "Error", MB_OK);
    return 0;
  }

  if (!InitScene()) // Initialize our scene
  {
    MessageBox(0, "Scene Initialization - Failed", "Error", MB_OK);
    return 0;
  }

  // Run the message loop until WM_QUIT, then tear everything down.
  messageloop();
  CleanUp();
  return 0;
}
// window initialization (used in Windows main function)
// Registers the window class and creates the main application window.
// Returns true on success, false on failure.
bool InitializeWindow(HINSTANCE hInstance, // program instance
    int ShowWnd,                           // whther to show the window ?
    int width, int height,                 // size of the window
    bool windowed)                         //
{
  // Describe and register the window class.
  WNDCLASSEX wc;

  wc.cbSize = sizeof(WNDCLASSEX);
  wc.style = CS_HREDRAW | CS_VREDRAW;
  wc.lpfnWndProc = WndProc;
  wc.cbClsExtra = NULL;
  wc.cbWndExtra = NULL;
  wc.hInstance = hInstance;
  wc.hIcon = LoadIcon(NULL, IDI_APPLICATION);
  wc.hCursor = LoadCursor(NULL, IDC_ARROW);
  wc.hbrBackground = (HBRUSH)(COLOR_WINDOW + 2);
  wc.lpszMenuName = NULL;
  wc.lpszClassName = WndClassName;
  wc.hIconSm = LoadIcon(NULL, IDI_APPLICATION);

  if (!RegisterClassEx(&wc)) {
    MessageBox(NULL, "Error registering class", "Error", MB_OK | MB_ICONERROR);
    // BUG FIX: previously `return 1`, which converts to true in a bool
    // function, so callers treated the failure as success.
    return false;
  }

  hwnd = CreateWindowEx( // extended window class, based on the basic class
      NULL, WndClassName, "ex window title, you can set !!!", WS_OVERLAPPEDWINDOW, CW_USEDEFAULT,
      CW_USEDEFAULT, width, height, NULL, NULL, hInstance, NULL);

  if (!hwnd) {
    MessageBox(NULL, "Error creating window", "Error", MB_OK | MB_ICONERROR);
    return false; // BUG FIX: was `return 1` (i.e. true) on failure
  }

  ShowWindow(hwnd, ShowWnd);
  UpdateWindow(hwnd);

  return true;
}
// Initialize D3D interfaces: device, swap chain, render target view and the
// depth/stencil buffer+view, then bind them to the output-merger stage.
// Returns false if any creation call fails.
// BUG FIX: the original ignored every HRESULT, so a failed creation left null
// interface pointers that were later dereferenced.
bool InitializeDirect3d11App(HINSTANCE hInstance) {
  // Describe our Buffer (drawing on the window)
  DXGI_MODE_DESC bufferDesc;
  ZeroMemory(&bufferDesc, sizeof(DXGI_MODE_DESC));
  bufferDesc.Width = Width;
  bufferDesc.Height = Height;
  bufferDesc.RefreshRate.Numerator = 60;
  bufferDesc.RefreshRate.Denominator = 1;
  bufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
  bufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED;
  bufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED;

  // Describe our SwapChain (multiple window buffer; usually for double-buffering)
  DXGI_SWAP_CHAIN_DESC swapChainDesc;
  ZeroMemory(&swapChainDesc, sizeof(DXGI_SWAP_CHAIN_DESC));
  swapChainDesc.BufferDesc = bufferDesc;
  swapChainDesc.SampleDesc.Count = 1; // 1 for double buffer; 2 for triple buffer
  swapChainDesc.SampleDesc.Quality = 0;
  swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
  swapChainDesc.BufferCount = 1;
  swapChainDesc.OutputWindow = hwnd;
  swapChainDesc.Windowed = TRUE;
  swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_DISCARD;

  // Create the device, immediate context and swap chain in one call.
  hr = D3D11CreateDeviceAndSwapChain(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, NULL, NULL, NULL,
      D3D11_SDK_VERSION, &swapChainDesc, &SwapChain, &d3d11Device, NULL, &d3d11DevCon);
  if (FAILED(hr)) return false;

  // Fetch the back buffer and create the render target view on it.
  ID3D11Texture2D* BackBuffer;
  hr = SwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&BackBuffer);
  if (FAILED(hr)) return false;

  hr = d3d11Device->CreateRenderTargetView(BackBuffer, NULL, &renderTargetView);
  BackBuffer->Release(); // the view keeps its own reference
  if (FAILED(hr)) return false;

  // Describe the depth/stencil buffer (it is a texture buffer)
  D3D11_TEXTURE2D_DESC depthStencilDesc;
  depthStencilDesc.Width = Width;
  depthStencilDesc.Height = Height;
  depthStencilDesc.MipLevels
      = 1; // max number of mipmap level; = 0 let d3d to generate a full set of mipmap
  depthStencilDesc.ArraySize = 1; // number of textures
  depthStencilDesc.Format = DXGI_FORMAT_D24_UNORM_S8_UINT; // 24 bit (depth) + 8 bit (stencil)
  depthStencilDesc.SampleDesc.Count = 1; // multisampling parameters
  depthStencilDesc.SampleDesc.Quality = 0;
  depthStencilDesc.Usage = D3D11_USAGE_DEFAULT;
  depthStencilDesc.BindFlags = D3D11_BIND_DEPTH_STENCIL; // used as depth/stencil buffer
  depthStencilDesc.CPUAccessFlags = 0;
  depthStencilDesc.MiscFlags = 0;

  // Create the depth/stencil buffer and its view (previously unchecked).
  hr = d3d11Device->CreateTexture2D(&depthStencilDesc, NULL, &depthStencilBuffer);
  if (FAILED(hr)) return false;
  hr = d3d11Device->CreateDepthStencilView(depthStencilBuffer, NULL, &depthSentcilView);
  if (FAILED(hr)) return false;

  // Set our Render Target (with a depth/stencil view)
  d3d11DevCon->OMSetRenderTargets(1, // number of render targets to bind
      &renderTargetView,             // render target view object pointer
      depthSentcilView               // depth/stencil view object
  );

  return true;
}
// Release those global objects.
// Each pointer is null-checked so CleanUp is safe even if initialization only
// partially succeeded (globals are zero-initialized, so un-created interfaces
// are null). Pointers are not reset afterwards because the process exits
// immediately after this call.
void CleanUp() {
  // Release the COM Objects we created
  if (SwapChain) SwapChain->Release();
  if (d3d11Device) d3d11Device->Release();
  if (d3d11DevCon) d3d11DevCon->Release();
  if (renderTargetView) renderTargetView->Release();
  if (depthSentcilView) depthSentcilView->Release();
  if (depthStencilBuffer) depthStencilBuffer->Release();

  // Release the rendering-related objects
  if (cubeIndexBuffer) cubeIndexBuffer->Release();
  if (cubeVertBuffer) cubeVertBuffer->Release();
  if (VS) VS->Release();
  if (PS) PS->Release();
  if (VS_Buffer) VS_Buffer->Release();
  if (PS_Buffer) PS_Buffer->Release();
  if (vertLayout) vertLayout->Release();
  if (cbPerObjectBuffer) cbPerObjectBuffer->Release();
  if (cbPerFrameBuffer) cbPerFrameBuffer->Release();
}
// Initialize the Scene&rendering related stuffs
bool InitScene() {
// Compile Shaders from shader file
// NOTE: new interface for D3DCompiler; different from the original tutorial
hr = D3DCompileFromFile(L"test_effects.hlsl", // shader file name
0, // shader macros
0, // shader includes
"VS", // shader entry pointer
"vs_4_0", // shader target: shader model version or effect type
0, 0, // two optional flags
&VS_Buffer, // recieve compiled shader code
0 // receive optional error repot
);
hr = D3DCompileFromFile(L"test_effects.hlsl", 0, 0, "PS", "ps_4_0", 0, 0, &PS_Buffer, 0);
// Create the Shader Objects
hr = d3d11Device->CreateVertexShader(
VS_Buffer->GetBufferPointer(), VS_Buffer->GetBufferSize(), // specific the shader data
NULL, // pointer to a class linkage interface; no using now
&VS // receive the returned vertex shader object
);
hr = d3d11Device->CreatePixelShader(
PS_Buffer->GetBufferPointer(), PS_Buffer->GetBufferSize(), NULL, &PS);
// Set Vertex and Pixel Shaders (to be used on the pipeline)
d3d11DevCon->VSSetShader(VS, // compiled shader object
0, // set the used interface (related to the class linkage interface?); not using currently
0 // the number of class-instance (related to above); not using currently
);
d3d11DevCon->PSSetShader(PS, 0, 0);
// Create the vertex & index buffer //
// the data we will use
Vertex v[] = {
// Position Color Normal
Vertex(-1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f),
Vertex(-1.0f, +1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, +1.0f, -1.0f),
Vertex(+1.0f, +1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, +1.0f, +1.0f, -1.0f),
Vertex(+1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, 1.0f, +1.0f, -1.0f, -1.0f),
Vertex(-1.0f, -1.0f, +1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, -1.0f, +1.0f),
Vertex(-1.0f, +1.0f, +1.0f, 1.0f, 1.0f, 1.0f, 1.0f, -1.0f, +1.0f, +1.0f),
Vertex(+1.0f, +1.0f, +1.0f, 1.0f, 1.0f, 1.0f, 1.0f, +1.0f, +1.0f, +1.0f),
Vertex(+1.0f, -1.0f, +1.0f, 1.0f, 1.0f, 1.0f, 1.0f, +1.0f, -1.0f, +1.0f),
};
DWORD indices[] = {// front face
0, 1, 2, 0, 2, 3,
// back face
4, 6, 5, 4, 7, 6,
// left face
4, 5, 1, 4, 1, 0,
// right face
3, 2, 6, 3, 6, 7,
// top face
1, 5, 6, 1, 6, 2,
// bottom face
4, 0, 3, 4, 3, 7};
// Create a buffer description for vertex data
D3D11_BUFFER_DESC vertexBufferDesc;
ZeroMemory(&vertexBufferDesc, sizeof(vertexBufferDesc));
vertexBufferDesc.Usage
= D3D11_USAGE_DEFAULT; // how the buffer will be read from and written to; use default
vertexBufferDesc.ByteWidth = sizeof(Vertex) * ARRAYSIZE(v); // size of the buffer
vertexBufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER; // used as vertex buffer
vertexBufferDesc.CPUAccessFlags = 0; // how it will be used by the CPU; we don't use it
vertexBufferDesc.MiscFlags = 0; // extra flags; not using
vertexBufferDesc.StructureByteStride = NULL; // not using
// Create a buffer description for indices data
D3D11_BUFFER_DESC indexBufferDesc;
ZeroMemory(&indexBufferDesc, sizeof(indexBufferDesc));
indexBufferDesc.Usage = D3D11_USAGE_DEFAULT; // I guess this is for DRAM type
indexBufferDesc.ByteWidth = sizeof(DWORD) * ARRAYSIZE(indices);
indexBufferDesc.BindFlags
= D3D11_BIND_INDEX_BUFFER; // different flags from vertex buffer; must be set
indexBufferDesc.CPUAccessFlags = 0;
indexBufferDesc.MiscFlags = 0;
indexBufferDesc.StructureByteStride = NULL;
// Create the vertex buffer data object
D3D11_SUBRESOURCE_DATA vertexBufferData; // parameter struct ?
ZeroMemory(&vertexBufferData, sizeof(vertexBufferData));
vertexBufferData.pSysMem = v; // the data to be put (defined above)
// vertexBufferData.SysMemPitch; // width of a line in the data; used in 2D/3D texture
// vertexBufferData.SysMemSlicePitch; // size of a depth-level; used in 3D texture
hr = d3d11Device->CreateBuffer(&vertexBufferDesc, // buffer description
&vertexBufferData, // parameter set above
&cubeVertBuffer // receive the returned ID3D11Buffer object
);
// Create the index buffer data object
D3D11_SUBRESOURCE_DATA indexBufferData; // parameter struct ?
ZeroMemory(&indexBufferData, sizeof(indexBufferData));
indexBufferData.pSysMem = indices;
d3d11Device->CreateBuffer(&indexBufferDesc, &indexBufferData, &cubeIndexBuffer);
// Set the vertex buffer (bind it to the Input Assembler)
UINT stride = sizeof(Vertex);
UINT offset = 0;
d3d11DevCon->IASetVertexBuffers(0, // the input slot we use as start
1, // number of buffer to bind; we bind one buffer
&cubeVertBuffer, // pointer to the buffer object
&stride, // pStrides; data size for each vertex
&offset // starting offset in the data
);
// Set the index buffer (bind to IA)
d3d11DevCon->IASetIndexBuffer( // NOTE: IndexBuffer !!
cubeIndexBuffer, // pointer o a buffer data object; must have D3D11_BIND_INDEX_BUFFER flag
DXGI_FORMAT_R32_UINT, // data format
0 // UINT; starting offset in the data
);
// Create the Input Layout
d3d11Device->CreateInputLayout(
layout, // element layout description (defined above at global scope)
numElements, // number of elements; (also defined at global scope)
VS_Buffer->GetBufferPointer(), VS_Buffer->GetBufferSize(), // the shader byte code
&vertLayout // received the returned Input Layout
);
// Set the Input Layout (bind to Input Assembler)
d3d11DevCon->IASetInputLayout(vertLayout);
// Set Primitive Topology (tell InputAssemble what type of primitives we are sending)
// alternatives: point list, line strip, line list, triangle strip, triangle ist,
// primitives with adjacency (only for geometry shader)
d3d11DevCon->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
// Create the D3D Viewport (settings are used in the Rasterizer Stage)
D3D11_VIEWPORT viewport;
ZeroMemory(&viewport, sizeof(D3D11_VIEWPORT));
viewport.TopLeftX = 0; // position of
viewport.TopLeftY = 0; // the top-left corner in the window.
viewport.Width = Width;
viewport.Height = Height;
viewport.MinDepth = 0.0f; // set depth range; used for converting z-values to depth
viewport.MaxDepth = 1.0f; // furthest value
// Set the Viewport (bind to the Raster Stage of he pipeline)
d3d11DevCon->RSSetViewports(1, // number of viewport to set
&viewport // array of viewports
);
// Camera data
camPosition = DirectX::XMVectorSet(0.0f, 2.0f, -10.0f, 0.0f);
camTarget = DirectX::XMVectorSet(0.0f, 0.0f, 0.0f, 0.0f);
camUp = DirectX::XMVectorSet(0.0f, 1.0f, 0.0f, 0.0f);
camView = DirectX::XMMatrixLookAtLH(
camPosition, camTarget, camUp); // directX Math function to created camera view transform
camProjection = DirectX::XMMatrixPerspectiveFovLH(
0.4f * 3.14f, (float)Width / Height, 1.0f, 1000.0f); // directX Math function
// Create a constant buffer for transform
D3D11_BUFFER_DESC cbbd;
ZeroMemory(&cbbd, sizeof(D3D11_BUFFER_DESC));
cbbd.Usage = D3D11_USAGE_DEFAULT;
cbbd.ByteWidth = sizeof(cbPerObject);
cbbd.BindFlags = D3D11_BIND_CONSTANT_BUFFER; // NOTE: we use Constant Buffer
cbbd.CPUAccessFlags = 0;
cbbd.MiscFlags = 0;
d3d11Device->CreateBuffer(&cbbd, NULL, &cbPerObjectBuffer);
// Light data
g_light.dir = DirectX::XMFLOAT3(0.25f, 0.5f, 0.0f);
g_light.ambient = DirectX::XMFLOAT4(0.2f, 0.2f, 0.2f, 1.0f);
g_light.diffuse = DirectX::XMFLOAT4(1.0f, 0.9f, 0.4f, 1.0f);
// Create a constant buffer for light
ZeroMemory(&cbbd, sizeof(D3D11_BUFFER_DESC)); // reuse the DESC struct above
cbbd.Usage = D3D11_USAGE_DEFAULT;
cbbd.ByteWidth = sizeof(cbPerFrame);
cbbd.BindFlags = D3D11_BIND_CONSTANT_BUFFER; // NOTE: we use Constant Buffer
cbbd.CPUAccessFlags = 0;
cbbd.MiscFlags = 0;
d3d11Device->CreateBuffer(&cbbd, NULL, &cbPerFrameBuffer);
return true;
}
void UpdateScene() {
// Keep rotating the cube
if ((rot += 0.0005f) > 6.28f) rot = 0.0f; // the rorate angle
// Reset cube world each frame //
DirectX::XMVECTOR rotaxis = DirectX::XMVectorSet(0.0f, 1.0f, 0.0f, 0.0f);
Roration = DirectX::XMMatrixRotationAxis(rotaxis, rot);
TRanslation = DirectX::XMMatrixTranslation(0.0f, 0.0f, 4.0f); // alway away from origin by 4.0
cube1world = TRanslation * Roration;
Roration = DirectX::XMMatrixRotationAxis(rotaxis, -rot); // reversed direction
Scale = DirectX::XMMatrixScaling(1.3f, 3.0f, 1.3f); // little bit bigger than cube 1
cube2world = Roration * Scale;
}
// Render the scene
void DrawScene() {
// Clear our backbuffer
float bgColor[4] = {0.3f, 0.6f, 0.7f, 1.0f};
d3d11DevCon->ClearRenderTargetView(renderTargetView, bgColor);
// Also clear the depth/stencil view each frame
d3d11DevCon->ClearDepthStencilView(depthSentcilView, // the view to clear
D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, // specify the part of the depth/stencil view to clear
1.0f, // clear value for depth; should set to the furtheast value (we use 1.0 as furthest)
0 // clear value for stencil; we actually not using stencil currently
);
// Calculate the WVP matrix and send it (set to VS constant buffer in the pipeline)
// World = DirectX::XMMatrixIdentity();
// WVP = World * camView * camProjection;
// g_cbPerObj.WVP = DirectX::XMMatrixTranspose(WVP);
// d3d11DevCon->UpdateSubresource(
// cbPerObjectBuffer, // pointer to the destination buffer
// 0, // index for the destination (of array above)
// NULL, // optional pointer to a D3D11_BOX
// &g_cbPerObj, // pointer to source data in memory
// 0, // size of a row (used for 2D/3D buffer?)
// 0 // size of a depth slice (used for 3D buffer?)
//);
// d3d11DevCon->VSSetConstantBuffers(
// 0, // start slot in the constant buffer to be set
// 1, // number of buffers to set (start from the slot above)
// &cbPerObjectBuffer // array of constant buffer to send
//);
// Set the light for the scene
g_cbPerFrm.light = g_light;
d3d11DevCon->UpdateSubresource(
cbPerFrameBuffer, 0, NULL, &g_cbPerFrm, 0, 0); // update into buffer object
d3d11DevCon->PSSetConstantBuffers(0, 1, &cbPerFrameBuffer); // send to the Pixel Stage
// Reset the Vertex and Pixel shaders (because we just update the constant data in the shader
// code?)
d3d11DevCon->VSSetShader(VS, 0, 0);
d3d11DevCon->PSSetShader(PS, 0, 0);
// Draw cube 1 //
// Set transform
WVP = cube1world * camView * camProjection;
g_cbPerObj.WVP = DirectX::XMMatrixTranspose(WVP); // shader use `x * A`
g_cbPerObj.World = DirectX::XMMatrixTranspose(cube1world);
d3d11DevCon->UpdateSubresource(cbPerObjectBuffer, 0, NULL, &g_cbPerObj, 0, 0);
d3d11DevCon->VSSetConstantBuffers(0, 1, &cbPerObjectBuffer);
// Draw
d3d11DevCon->DrawIndexed(36, 0, 0);
// Draw cube 2 //
// Set transform
WVP = cube2world * camView * camProjection;
g_cbPerObj.WVP = DirectX::XMMatrixTranspose(WVP); // shader use left-mul
g_cbPerObj.World = DirectX::XMMatrixTranspose(cube2world);
d3d11DevCon->UpdateSubresource(cbPerObjectBuffer, 0, NULL, &g_cbPerObj, 0, 0);
d3d11DevCon->VSSetConstantBuffers(0, 1, &cbPerObjectBuffer);
// Draw
d3d11DevCon->DrawIndexed(36, 0, 0);
// Present the backbuffer to the screen
SwapChain->Present(0, // control vertical synchronization; see MSDN
0 // flags to control presentation; see MSDN
);
}
int messageloop() {
MSG msg;
ZeroMemory(&msg, sizeof(MSG));
while (true) {
BOOL PeekMessageL(
LPMSG lpMsg, HWND hWnd, UINT wMsgFilterMin, UINT wMsgFilterMax, UINT wRemoveMsg);
if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
if (msg.message == WM_QUIT) break;
TranslateMessage(&msg);
DispatchMessage(&msg);
} else {
// run game code
UpdateScene();
DrawScene();
}
}
return msg.wParam;
}
LRESULT CALLBACK WndProc(HWND hwnd, UINT msg, WPARAM wParam, LPARAM lParam) {
switch (msg) {
case WM_KEYDOWN:
if (wParam == VK_ESCAPE) { DestroyWindow(hwnd); }
return 0;
case WM_DESTROY:
PostQuitMessage(0);
return 0;
}
return DefWindowProc(hwnd, msg, wParam, lParam);
} |
vaibhav-s/self-driving-car | steering-models/community-models/autumn/autumn/__init__.py | <reponame>vaibhav-s/self-driving-car
from .cnn_model import ConvModel
from .data_reader import DataReader
|
JosephCrandall44/CSCI_3212_Algorithms | jwcrandall_a1/BinarySearchTree.java | /*
Binary Search Tree
Programed by <NAME>
Last modifed September 28, 2015
*/
import edu.gwu.algtest.*;
import edu.gwu.debug.*;
import edu.gwu.util.*;
import java.util.*;
import java.lang.*;
//NOTE keys are unique in this BST, no duplicates
//Tree node class has the following fields has the following parameters
//public TreeNode(java.lang.Comparable key,java.lang.Object value,TreeNode left,TreeNode right,TreeNode parent)
public class BinarySearchTree implements TreeSearchAlgorithm {
//int count = 0;
public TreeNode root;
//Interface TreeSearchAlgorithm
public TreeNode getRoot(){
return(root); //the root
}
//All Superinterfaces: edu.gwu.algtest.Algorithm, edu.gwu.algtest.OrderedSearchAlgorithm, edu.gwu.algtest.SearchAlgorithm
//edu.gwu.algtest.SearchAlgorithm
// initialize
public void initialize(int maxSize){
//TreeNode root = getRoot();
root = null;
}
//getCurrentSize
public int getCurrentSize(){
TreeNode root = getRoot();
return(countSize(root));
}
// used in geCurrentSize
int countSize(TreeNode node){
if( node == null){
return(0);
}
else{
//root only case
if( node.left == null && node.right == null){
return(1);
}
//count the left and right children recusivly
else{
return(1 + (countSize(node.left) + countSize(node.right)));
}
}
}
//edu.gwu.algtest.Algorithm
//get name
public java.lang.String getName(){
return "Joseph Crandall's implementation of Binary Search Tree";
}
//get property extractor
public void setPropertyExtractor(int algID,edu.gwu.util.PropertyExtractor prop){
//empty implementations, method definition empty body
}
//comprable java.lang.Comparable
//edu.gwu.algtest.OrderedSearchAlgorithm
//insert
public Object insert(Comparable keyin,Object valuein){
//creating the root if this is the first value to be inserted
if (root == null){
//System.out.println("inserting root");
root = new TreeNode(); // do not initialize a new root every time this is wrong TreeNode root = new TreeNode();
//TreeNode in = new TreeNode();
root.key = keyin; //might not need these
root.value = valuein; //or these
root.left = null;
root.right = null;
root.parent = null;
//System.out.println(root.key);
return (root.value);
}
else{
//System.out.println("recursive insert");
return(recursiveInsert(root, keyin, valuein));
}
}
//the reursive insert used in the insert method
public Object recursiveInsert(TreeNode inrec,Comparable keyin,Object valuein ){
//System.out.println("inside recursive insert");
int i = keyin.compareTo(inrec.key);
//insert left
if(i < 0){
if(inrec.left == null){
//System.out.println("left");
TreeNode inLeft = new TreeNode();
inrec.left = inLeft; // linkeding down
inLeft.key = keyin;
inLeft.value = valuein;
inLeft.left = null;
inLeft.right = null;
inLeft.parent = inrec; // linking up //this is like a doubly linked list with a tree
//System.out.println(inLeft.key);
//return(inLeft.value);
valuein = (inLeft.value);
}
else{
//insert left recursively
recursiveInsert(inrec.left, keyin, valuein);
}
}
//insert right
if(i > 0){
if(inrec.right == null){
//System.out.println("right");
TreeNode inRight = new TreeNode();
inrec.right = inRight; // linking down
inRight.key = keyin;
inRight.value = valuein;
inRight.left = null;
inRight.right = null;
inRight.parent = inrec; // linking up
//System.out.println(inRight.key);
//return(inRight.value);
valuein = (inRight.value);
}
else{
//insert right recursively
recursiveInsert(inrec.right, keyin, valuein);
}
}
//else
if(i == 0){
//replace duplicate
//System.out.println("duplicate value NOT KEY");
TreeNode temp = new TreeNode();
temp.value = inrec.value;
inrec.value = valuein;
//System.out.println(inrec.value);
//return(temp.value);
valuein = temp.value;
}
return(valuein); // this is not correct, just needed to put something here so that the code compiles
}
//print Use Left Node Right Print
public void leftNodeRightPrint(TreeNode node){
//System.out.println("inside print");
if (node.left != null){
leftNodeRightPrint(node.left);
}
System.out.println("key : " + node.key + " value : " + node.value);
if (node.right != null){
leftNodeRightPrint(node.right);
}
}
//search
public ComparableKeyValuePair search(Comparable key){
//System.out.println("inside search");
ComparableKeyValuePair searchResult = new ComparableKeyValuePair();
//searchResult.key = null;
//searchResult.value = null;
if(getRoot() == null){
return(null);
}
searchResult = (searchRecursive(getRoot(),key, searchResult)); //there is a better way
if(searchResult.key == null){
return null;
}
return searchResult;
}
//searchRecursive
public ComparableKeyValuePair searchRecursive(TreeNode node,Comparable key, ComparableKeyValuePair searchResult){
//System.out.println("inside print");
//System.out.println("inside search recursive");
if (node.left != null){
searchRecursive(node.left, key, searchResult);
}
//if a key is found that is equal to the search key a new ComparableKeyValuePair is created and returned
int i = key.compareTo(node.key);
if (i == 0){
//ComparableKeyValuePair searchResult = new ComparableKeyValuePair();
searchResult.key = node.key;
searchResult.value = node.value;
//blank = searchResult;
//System.out.println("key found:" + searchResult.key + " value found" + searchResult.value);
//System.out.println(searchResult.value);
//return(searchResult);
}
if (node.right != null){
searchRecursive(node.right, key, searchResult);
}
//if no key value maches the search key value the method returns null
return (searchResult);
}
// I have to figure out how to place bith the value and the key in a comparablekey value pair when search is over.
//minimum
public ComparableKeyValuePair minimum(){
//System.out.println("inside minimum");
//get root
root = getRoot();
//root null case
if(root == null){
//System.out.println("root null case");
return(null);
}
//initilize the return value
ComparableKeyValuePair min = new ComparableKeyValuePair();
//System.out.println("past root null case");
//root left null clase
if (root.left == null){
//ComparableKeyValuePair min = new ComparableKeyValuePair();
//System.out.println("root is min");
min.key = root.key;
min.value = root.value;
//System.out.println("root is min : " + min.key);
//System.out.print(min.key);
//return(minroot);
}
if(root.left != null){
//ComparableKeyValuePair min = new ComparableKeyValuePair();
min = minimumRecursive(root.left, min);
}
return(min);
//error case, should never be returned
//return (null);
}
//minimum recursive method
public ComparableKeyValuePair minimumRecursive(TreeNode node, ComparableKeyValuePair min){
//System.out.println("inside minimum recursive");
//recursing further left
if (node.left != null){
//System.out.println("inside minimum recursive going left");
minimumRecursive(node.left, min);
}
//setting the min key value pair
if (node.left == null){
//System.out.print(root.left);
//ComparableKeyValuePair min = new ComparableKeyValuePair();
min.key = node.key;
min.value = node.value;
//return(min);
}
return(min);
}
//maximum
public ComparableKeyValuePair maximum(){
//System.out.println("inside maximum");
//get root
root = getRoot();
//root null case
if(root == null){
//System.out.println(" root null case");
return(null);
}
//System.out.println("past root null case");
//initilize the return value
ComparableKeyValuePair max = new ComparableKeyValuePair();
//root left null clase
if (root.right == null){
//System.out.println("root is max");
//ComparableKeyValuePair maxroot = new ComparableKeyValuePair();
max.key = root.key;
max.value = root.value;
//System.out.println("root case");
//return(maxroot);
}
//System.out.println("past root case");
//recursive if root left not null
if (root.right != null){
//ComparableKeyValuePair max = new ComparableKeyValuePair();
max = maximumRecursive(root.right, max);
}
//error case, should never be returned
return (max);
}
//maximum recursive method
public ComparableKeyValuePair maximumRecursive(TreeNode node, ComparableKeyValuePair max){
//System.out.println("inside maximum recursive");
if (node.right != null){
//System.out.println("inside maximum recursive going right");
//System.out.println(node.right.key);
maximumRecursive(node.right, max);
}
//ComparableKeyValuePair max = new ComparableKeyValuePair();
if (node.right == null){
max.key = node.key;
max.value = node.value;
//System.out.print(max.key);
//System.out.println("found max : " + max.key);
}
return(max);
}
//maximum recursive method wich returns TreeNode
public TreeNode maximumRecursiveTN(TreeNode node, ComparableKeyValuePair max){
if (root.right != null){
maximumRecursive(root.right, max);
}
return(node);
}
//delete and return the object from the deleted node
public Object delete(Comparable key){
//System.out.println("the key to be deleted : " + key);
if(search(key).key == null){
System.out.println("key does not exist case, return null object");
return (null);
}
//Object deletedValue = null;
TreeNode temp = new TreeNode(key, null);
return (deleteRecursive(temp,getRoot()));
//deletedValue = deleteRecursive(temp,getRoot());
//return deletedValue;
}
//recursive delete
public Object deleteRecursive(TreeNode node, TreeNode start){
Comparable startKey = start.key;
Object deletedValue = null;
if(startKey.compareTo(node.key)>0){
//recurse left
deletedValue = deleteRecursive(node, start.left);
}
else if(startKey.compareTo(node.key)<0){
//recurse right
deletedValue = deleteRecursive(node, start.right);
}
else if (startKey.compareTo(node.key)==0){
//tree node start.value is the node to be deleted and value removed
deletedValue = start.value;
if(startKey.compareTo(getRoot().key)==0){
//System.out.println("delete the node " + getRoot().key);
//node to be deleted is root of tree (null parent)
//System.out.println("**Deleting: (Key = "+startKey+" = root)");
if(start.left == null && start.right == null){
//node to be deleted has 0 children
start = null;
root = null;
}
else if(start.left == null){
//node to be deleted only has a right child
root = start.right;
start.right.parent = null;
start = null;
}
else if(start.right == null){
//node to be deleted only has a left child
root = start.left;
start.left.parent = null;
start = null;
}
else{
//node to be deleted has two children
//find successor of node to be deleted
//successor will have at most one node -think about it. very clever!
Comparable successorKey = successor(startKey); //get successor's key
//System.out.println("successorKey = "+successorKey);
Object successorValue = delete(successorKey); //get successor's value and remove it from the tree
TreeNode successor = new TreeNode(successorKey, successorValue); //make a successor node
//now, replace node to be deleted with successor node
root = successor;
successor.parent = null;
successor.left = start.left;
successor.right = start.right;
}
return deletedValue;
//return null;
}
if(start.left == null && start.right == null){
//node to be deleted has 0 children
if(start.equals(start.parent.left)){
start.parent.left = null;
}
else if(start.equals(start.parent.right)){
start.parent.right = null;
}
}
else if(start.left == null){
//node to be deleted only has a right child
if(start.equals(start.parent.left)){
start.right.parent = start.parent;
start.parent.left = start.right;
}
else if(start.equals(start.parent.right)){
start.right.parent = start.parent;
start.parent.right = start.right;
}
}
else if(start.right == null){
//node to be deleted only has a left child
if(start.equals(start.parent.left)){
start.left.parent = start.parent;
start.parent.left = start.left;
}
else if(start.equals(start.parent.right)){
start.left.parent = start.parent;
start.parent.right = start.left;
}
}
else{
//CASE: node to be deleted has two children
//find successor of node to be deleted
//successor will have at most one node -think about it. very clever!
Comparable successorKey = successor(startKey); //get successor's key
//System.out.println("successorKey = "+successorKey);
Object successorValue = delete(successorKey); //get successor's value and remove it from the tree
TreeNode successor = new TreeNode(successorKey, successorValue); //make a successor node
//now, replace node to be deleted with successor node
if(start.equals(start.parent.left)){
successor.parent = start.parent;
successor.left = start.left;
successor.right = start.right;
start.parent.left = successor;
}else if(start.equals(start.parent.right)){
successor.parent = start.parent;
successor.left = start.left;
successor.right = start.right;
start.parent.right = successor;
}
}
}
return deletedValue;
}
//sucessor
public Comparable successor (Comparable key){
Comparable successor = null;
//print
if(search(key).key == null){
//key not in tree
return successor;
}
Enumeration e = getKeys();
Comparable temp = null;
Comparable prev = null;
while (e.hasMoreElements()){
temp = (Comparable) e.nextElement();
//successor = temp
if(prev != null){
if(prev.equals(key)){
successor = temp;
return successor;
}
}
prev = temp;
}
//System.out.print("successor: " + successor);
if(!e.hasMoreElements()){
//System.out.println("--key("+key+") is maximum. No Successor");
}
return successor;
}
//predecessor
public Comparable predecessor (Comparable key){
Comparable predecessor = null;
//print
if(search(key).key == null){
//key is not in tree
return predecessor;
}
Enumeration e = getKeys();
Comparable temp = (Comparable) e.nextElement();
if(temp.equals(key)){
//key is minimum - no predecessor
//System.out.println("--key ("+key+") is minimum. No predecessor");
return predecessor;
}
while(e.hasMoreElements()){
predecessor = temp;
temp = (Comparable) e.nextElement();
if(temp.equals(key)){
return predecessor;
}
}
return predecessor;
}
//getKeys
public Enumeration getKeys(){
Keys keys = new Keys(getCurrentSize());
//printBlue("getKeys() (sorted)");
getKeysRecursive(keys, getRoot());
Enumeration e = keys.getEnumeration();
return e;
}
//recursively add keys to Keys enum
public Keys getKeysRecursive(Keys keys, TreeNode node){
if(node.left != null)
getKeysRecursive(keys, node.left);
keys.addElement(node.key);
if(node.right != null)
getKeysRecursive(keys, node.right);
return keys;
}
//getValues
public Enumeration getValues(){
Values values = new Values(getCurrentSize());
getValuesRecursive(values, getRoot());
Enumeration e = values.getEnumeration();
return e;
}
//recursively add values to values enum
public Values getValuesRecursive(Values values, TreeNode node){
if(node.left != null)
getValuesRecursive(values, node.left);
values.addElement(node.value);
if(node.right != null)
getValuesRecursive(values, node.right);
return values;
//return(null);
}
public static void main(String[] args) {
System.out.println("The main is runing");
System.out.println("test insert!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
BinarySearchTree BST = new BinarySearchTree();
Comparable testComp = 4;
Object testObj = 6;
BST.insert(testComp, testObj);
Comparable testComp2 = 9;
Object testObj2 = 32;
BST.insert(testComp2, testObj2);
Comparable testComp3 = 2;
Object testObj3 = 87;
BST.insert(testComp3, testObj3);
Comparable testComp4 = 886;
Object testObj4 = 1342;
BST.insert(testComp4, testObj4);
Comparable testComp5 = 74;
Object testObj5 = 924;
BST.insert(testComp5, testObj5);
Comparable testComp6 = 14;
Object testObj6 = 83;
BST.insert(testComp6, testObj6);
Comparable testComp7 = 71;
Object testObj7 = 23;
BST.insert(testComp7, testObj7);
Comparable testComp8 = 39;
Object testObj8 = 2222;
BST.insert(testComp8, testObj8);
Comparable testComp9 = 1000;
Object testObj9 = 2222;
BST.insert(testComp9, testObj9);
System.out.println("testing print BST!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
TreeNode print = BST.getRoot();
BST.leftNodeRightPrint(print);
//testing non null search case
System.out.println("testing search BST!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
Comparable testCompser6 = 2;
ComparableKeyValuePair searchtest = BST.search(testCompser6);
System.out.println(searchtest);
Comparable testCompNull1 = 23;
System.out.println("null search result test!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
System.out.println(BST.search(testCompNull1));
System.out.println("testing getCurrent size!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
int sizetest = BST.getCurrentSize();
System.out.println(sizetest);
System.out.println("testing min!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
ComparableKeyValuePair mintest = BST.minimum();
System.out.println("min key: " + mintest.key + " min value:" + mintest.value);
System.out.println("testing max!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
ComparableKeyValuePair maxtest = BST.maximum();
System.out.println("max key: " + maxtest.key + " max value: " + maxtest.value);
System.out.println("testing successor!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
Comparable successor = BST.successor(testComp5);
System.out.println("the successor of key " + testComp5 + " is key " + successor );
System.out.println("testing predecessor!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
Comparable predecessor = BST.predecessor(testComp5);
System.out.println("the predecessor of key " + testComp5 + " is key " + predecessor );
// System.out.println("testing delete root!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
// Object deleted = BST.delete(testComp);
// System.out.println("deleted object " + deleted + " at key " + testComp);
// System.out.println(" the current root now is " + BST.getRoot().key);
// System.out.println("testing delete no children!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
// Object deleted = BST.delete(testComp3);
// System.out.println("deleted object " + deleted + " at key " + testComp3);
// System.out.println("testing delete with left child !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
// Object deleted = BST.delete(testComp5);
// System.out.println("deleted object " + deleted + " at key " + testComp5);
// System.out.println("testing delete with right child !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
// Object deleted = BST.delete(testComp6);
// System.out.println("deleted object " + deleted + " at key " + testComp6);
System.out.println("testing delete with left and right child !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
Object deleted = BST.delete(testComp4);
System.out.println("deleted object " + deleted + " at key " + testComp4);
}
}
//Keys Class
class Keys implements Enumeration{
Comparable[] keys;
int index = 0;
int size = 0;
Keys(int s){
keys = new Comparable[s];
}
public void addElement(Comparable key){
keys[size] = key;
size++;
}
public boolean hasMoreElements(){
if(index<size){
return true;
}else return false;
}
//next element
public Comparable nextElement(){
Comparable key = keys[index];
index++;
return key;
}
//get enumeration method
public Enumeration getEnumeration(){
index = 0;
return this;
}
}
//Values class
class Values implements Enumeration{
Object[] keys;
int index = 0;
int size = 0;
Values(int s){
keys = new Object[s];
}
//add element to key
public void addElement(Object key){
keys[size] = key;
size++;
}
//check if their are more elements to add
public boolean hasMoreElements(){
if(index<size){
return true;
}else return false;
}
//iterate to the next element
public Object nextElement(){
Object key = keys[index];
index++;
return key;
}
//get Enumeation method
public Enumeration getEnumeration(){
index = 0;
return this;
}
} |
ralfonso/spree | cmd/spreed/command.go | package main
import "github.com/codegangsta/cli"
var (
caCertFileFlag = cli.StringFlag{
Name: "ca.cert.file",
Value: "/etc/spree/certs/spree.ca.crt",
Usage: "CA cert file for TLS server",
EnvVar: "SPREE_CACERT_FILE",
}
certFileFlag = cli.StringFlag{
Name: "cert.file",
Value: "/etc/spree/certs/spree.dev.crt",
Usage: "cert file for TLS server",
EnvVar: "SPREE_CERT_FILE",
}
keyFileFlag = cli.StringFlag{
Name: "key.file",
Value: "/etc/spree/certs/spree.dev.key",
Usage: "key file for TLS server",
EnvVar: "SPREE_KEY_FILE",
}
rpcAddrFlag = cli.StringFlag{
Name: "rpc.addr",
Value: "localhost:4285",
Usage: "host:port for the grpc server",
EnvVar: "SPREE_RPC_ADDR",
}
httpAddrFlag = cli.StringFlag{
Name: "http.addr",
Value: "localhost:8383",
Usage: "host:port for the http server",
EnvVar: "SPREE_HTTP_ADDR",
}
dataDirFlag = cli.StringFlag{
Name: "data.dir",
Value: "/tmp",
Usage: "The directory in which to store uploaded files",
EnvVar: "SPREE_DATA_DIR",
}
dbFileFlag = cli.StringFlag{
Name: "db.file",
Value: "/tmp/spree.boltdb",
Usage: "The file to use for the database",
EnvVar: "SPREE_DB_FILE",
}
dbBucketFlag = cli.StringFlag{
Name: "db.bucket",
Value: "spree",
Usage: "The bucket to use in the database",
EnvVar: "SPREE_DB_BUCKET",
}
allowedEmailsFlag = cli.StringFlag{
Name: "allowed.emails",
Value: "",
Usage: "comma-separated string containing the emails allowed to access the server",
EnvVar: "SPREE_ALLOWED_EMAILS",
}
)
var GlobalFlags = []cli.Flag{
caCertFileFlag,
certFileFlag,
keyFileFlag,
rpcAddrFlag,
httpAddrFlag,
dataDirFlag,
dbFileFlag,
dbBucketFlag,
allowedEmailsFlag,
}
|
hillst/MetaStone | src/net/demilich/metastone/game/cards/concrete/warlock/TwistingNether.java | <filename>src/net/demilich/metastone/game/cards/concrete/warlock/TwistingNether.java
package net.demilich.metastone.game.cards.concrete.warlock;
import net.demilich.metastone.game.cards.Rarity;
import net.demilich.metastone.game.cards.SpellCard;
import net.demilich.metastone.game.entities.heroes.HeroClass;
import net.demilich.metastone.game.spells.DestroySpell;
import net.demilich.metastone.game.targeting.EntityReference;
import net.demilich.metastone.game.targeting.TargetSelection;
/**
 * "Twisting Nether": an 8-mana epic Warlock spell that destroys all minions
 * on the board. It requires no target selection.
 */
public class TwistingNether extends SpellCard {
    public TwistingNether() {
        super("Twisting Nether", Rarity.EPIC, HeroClass.WARLOCK, 8);
        setDescription("Destroy all minions.");
        // Board clear: the destroy effect is applied to every minion.
        setSpell(DestroySpell.create(EntityReference.ALL_MINIONS));
        setTargetRequirement(TargetSelection.NONE);
    }

    @Override
    public int getTypeId() {
        // Stable numeric id for this card, used by card lookup/serialization.
        return 356;
    }
}
|
rjw57/tiw-computer | emulator/src/devices/bus/a2bus/a2corvus.cpp | // license:BSD-3-Clause
// copyright-holders:<NAME>
/*********************************************************************
a2corvus.c
Implementation of the Corvus flat-cable hard disk interface
for the Apple II.
This same card was used in the Corvus Concept.
C0n0 = drive read/write
C0n1 = read status (busy in bit 7, data direction in bit 6)
Reads and writes to C0n2+ happen; the contents of the reads are thrown away
immediately by all the code I've examined, and sending the writes to the
drive's write port makes it not work so they're intended to be ignored too.
5 MB: -chs 144,4,20 -ss 512
10 MB: -chs 358,3,20 -ss 512
20 MB: -chs 388,5,20 -ss 512
To set up a disk from scratch on the Apple II:
1) Create a disk of your desired capacity using CHDMAN -c none and the parameters
listed above for each of the possible sizes.
2) Boot apple2p with the corvus in slot 2 and a diskii(ng) in slot 6 with the
"Corvus Hard Drive - Diagnostics.dsk" mounted.
3) Press F to format. Accept all the default options from now on;
there is no "format switch" to worry about with the current emulation.
4) Quit MESS. Restart with the corvus in slot 6 and a diskii(ng) in slot 7
with the "Corvus Hard Drive - Utilities Disk 1.dsk" mounted.
5) When you get the BASIC prompt, "RUN BSYSGEN"
6) Choose drive 1 and press Y at "OK TO BSYSGEN?"
7) When the format completes, type "RUN APPLESOFT BOOT PREP" and press Enter.
8) Once it finishes, quit MESS. Remove the diskii(ng) from slot 7 and
the system will boot into DOS 3.3 from the Corvus HD.
TODO: but there are no Corvus drivers present after that, only
Disk II?
*********************************************************************/
#include "emu.h"
#include "a2corvus.h"
#include "imagedev/harddriv.h"
/***************************************************************************
PARAMETERS
***************************************************************************/
//**************************************************************************
// GLOBAL VARIABLES
//**************************************************************************
DEFINE_DEVICE_TYPE(A2BUS_CORVUS, a2bus_corvus_device, "a2corvus", "Corvus Flat Cable interface")
#define CORVUS_ROM_REGION "corvus_rom"
#define CORVUS_HD_TAG "corvushd"
// On-card boot ROM: 2 KiB firmware image, version A4.7, from socket U10.
ROM_START( corvus )
	ROM_REGION(0x800, CORVUS_ROM_REGION, 0)
	ROM_LOAD( "a4.7.u10", 0x0000, 0x0800, CRC(1cf6e32a) SHA1(dbd6efeb3b54c0523b8b4eda8b3d737413f6a91a) )
ROM_END
/***************************************************************************
FUNCTION PROTOTYPES
***************************************************************************/
//-------------------------------------------------
// device_add_mconfig - add device configuration
//-------------------------------------------------
MACHINE_CONFIG_START(a2bus_corvus_device::device_add_mconfig)
	// One flat-cable controller...
	MCFG_DEVICE_ADD(CORVUS_HD_TAG, CORVUS_HDC, 0)
	// ...with four hard disk image slots attached to it.
	MCFG_HARDDISK_ADD("harddisk1")
	MCFG_HARDDISK_INTERFACE("corvus_hdd")
	MCFG_HARDDISK_ADD("harddisk2")
	MCFG_HARDDISK_INTERFACE("corvus_hdd")
	MCFG_HARDDISK_ADD("harddisk3")
	MCFG_HARDDISK_INTERFACE("corvus_hdd")
	MCFG_HARDDISK_ADD("harddisk4")
	MCFG_HARDDISK_INTERFACE("corvus_hdd")
MACHINE_CONFIG_END
//-------------------------------------------------
// rom_region - device-specific ROM region
//-------------------------------------------------
// Hands MAME this card's ROM definitions (the 2 KiB boot ROM).
const tiny_rom_entry *a2bus_corvus_device::device_rom_region() const
{
	return ROM_NAME( corvus );
}
//**************************************************************************
// LIVE DEVICE
//**************************************************************************
// Full constructor, used directly by derived/variant types; wires up the
// A2 bus card interface and the controller subdevice finder.
a2bus_corvus_device::a2bus_corvus_device(const machine_config &mconfig, device_type type, const char *tag, device_t *owner, uint32_t clock) :
	device_t(mconfig, type, tag, owner, clock),
	device_a2bus_card_interface(mconfig, *this),
	m_corvushd(*this, CORVUS_HD_TAG), m_rom(nullptr)
{
}
// Convenience constructor delegating to the full one with this card's type.
a2bus_corvus_device::a2bus_corvus_device(const machine_config &mconfig, const char *tag, device_t *owner, uint32_t clock) :
	a2bus_corvus_device(mconfig, A2BUS_CORVUS, tag, owner, clock)
{
}
//-------------------------------------------------
// device_start - device-specific startup
//-------------------------------------------------
void a2bus_corvus_device::device_start()
{
	// Resolve the base pointer of this card's ROM region (created from
	// device_rom_region() during machine construction).
	m_rom = device().machine().root_device().memregion(this->subtag(CORVUS_ROM_REGION).c_str())->base();
}
void a2bus_corvus_device::device_reset()
{
	// Nothing to do: the controller subdevice handles its own reset.
}
/*-------------------------------------------------
read_c0nx - called for reads from this card's c0nx space
-------------------------------------------------*/
// Reads from the card's C0nx I/O space. C0n0 is the drive data port and
// C0n1 the controller status register; any other offset is unhandled,
// logged, and reads as a floating 0xff.
uint8_t a2bus_corvus_device::read_c0nx(uint8_t offset)
{
	if (offset == 0)
	{
		return m_corvushd->read(machine().dummy_space(), 0);
	}

	if (offset == 1)
	{
		return m_corvushd->status_r(machine().dummy_space(), 0);
	}

	logerror("Corvus: read unhandled c0n%x (%s)\n", offset, machine().describe_context());
	return 0xff;
}
/*-------------------------------------------------
write_c0nx - called for writes to this card's c0nx space
-------------------------------------------------*/
// Writes to the card's C0nx I/O space. Only C0n0 (drive data port) is
// wired through; writes to other offsets are intentionally ignored (see
// the file header notes about C0n2+).
void a2bus_corvus_device::write_c0nx(uint8_t offset, uint8_t data)
{
	switch (offset)
	{
		case 0:
			m_corvushd->write(machine().dummy_space(), 0, data);
			break;

		default:
			break;
	}
}
/*-------------------------------------------------
read_cnxx - called for reads from this card's cnxx space
-------------------------------------------------*/
uint8_t a2bus_corvus_device::read_cnxx(uint8_t offset)
{
	// one slot image at the end of the ROM, it appears
	// (the 256-byte CnXX window maps onto the final page of the 2 KiB ROM)
	return m_rom[offset+0x700];
}
/*-------------------------------------------------
read_c800 - called for reads from this card's c800 space
-------------------------------------------------*/
uint8_t a2bus_corvus_device::read_c800(uint16_t offset)
{
	// Expose the whole 2 KiB ROM in the shared C800 expansion window;
	// masking keeps accesses inside the 0x800-byte region.
	return m_rom[offset & 0x7ff];
}
|
Spidey01/sxe | tests/cmds/CommandTest.cpp | <reponame>Spidey01/sxe
/*-
* Copyright (c) 2019-current, <NAME> <<EMAIL>>
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from the
* use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must
* not claim that you wrote the original software. If you use this
* software in a product, an acknowledgment in the product
* documentation would be appreciated but is not required.
*
* 2. Altered source versions must be plainly marked as such, and must
* not be misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source
* distribution.
*/
#include "./CommandTest.hpp"
#include <sxe/logging.hpp>
#include <sxe/stdheaders.hpp>
using std::string;
using sxe::cmds::Command;
static const string TAG = "CommandTest";
CPPUNIT_TEST_SUITE_NAMED_REGISTRATION(CommandTest, TAG);
CPPUNIT_REGISTRY_ADD_TO_DEFAULT(TAG);
// Test double for Command: records whether operator() was invoked before
// delegating to the base implementation.
class TestCommand : public Command
{
  public:

    // Set to true the first time operator() runs.
    bool invoked;

    TestCommand()
        : Command("test")
        , invoked(false)
    {
    }

    bool operator() (const argv& args) override
    {
        Log::xtrace(::TAG, "invoked = true");
        invoked = true;
        // Preserve the base class behavior/return value.
        return Command::operator()(args);
    }
};
// Verifies the constructor-supplied name is returned by getName().
void CommandTest::name()
{
    Log::xtrace(TAG, "name()");

    TestCommand test;

    CPPUNIT_ASSERT(test.getName() == "test");
}
// Verifies equality semantics: commands compare by name, so two distinct
// instances with the same name are equal and different names are not.
void CommandTest::compare()
{
    Log::xtrace(TAG, "compare()");

    TestCommand test;
    Command not_test("not_test");
    TestCommand other;

    CPPUNIT_ASSERT(test != not_test);
    CPPUNIT_ASSERT(test == other);
}
// Verifies operator() is actually invoked (and does not throw) when
// called with an empty argument list.
void CommandTest::invoke()
{
    Log::xtrace(TAG, "invoke()");

    TestCommand test;

    CPPUNIT_ASSERT(test.invoked == false);
    CPPUNIT_ASSERT_NO_THROW(test({}));
    CPPUNIT_ASSERT(test.invoked == true);
}
// Verifies operator() is invoked (and does not throw) when arguments are
// supplied.
void CommandTest::invokeWithArgs()
{
    // Fixed: this trace previously logged "setArgs()", which does not match
    // this method and made the test log misleading.
    Log::xtrace(TAG, "invokeWithArgs()");

    TestCommand test;

    CPPUNIT_ASSERT(test.invoked == false);
    CPPUNIT_ASSERT_NO_THROW(test({"Hello", "World!"}));
    CPPUNIT_ASSERT(test.invoked == true);
}
|
Ssunhx/ginServer | middleware/jwt.go | package middleware
import (
"ginserver/utils"
"github.com/dgrijalva/jwt-go"
"github.com/gin-gonic/gin"
"net/http"
"time"
)
// Jwtkey is the HMAC signing secret for issued tokens.
// It must be kept private: anyone holding it can forge valid tokens.
var Jwtkey = []byte("<KEY>")
// MyClaims is the JWT claim set carried in tokens issued by this package.
type MyClaims struct {
	// Username identifies the authenticated user.
	Username string `json:"username"`
	// Password is not populated by GenJwtToken (see the commented-out
	// assignment there); it exists only in the claim schema.
	Password string `json:"password"`
	jwt.StandardClaims
}
// GenJwtToken issues an HS256-signed JWT for the given username, valid for
// ten hours. It returns the token string and utils.SUCCESS, or an empty
// string and utils.ERROR if signing fails.
func GenJwtToken(username string) (string, int) {
	claims := MyClaims{
		Username: username,
		StandardClaims: jwt.StandardClaims{
			ExpiresAt: time.Now().Add(10 * time.Hour).Unix(), // expiry timestamp
			Issuer:    "ginblog",                             // token issuer
		},
	}

	signed, err := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString(Jwtkey)
	if err != nil {
		return "", utils.ERROR
	}
	return signed, utils.SUCCESS
}
// CheckJwtToken parses and validates a signed JWT. On success it returns
// the embedded claims and utils.SUCCESS; on any parse, signature or
// validity failure it returns nil and utils.ERROR.
func CheckJwtToken(token string) (*MyClaims, int) {
	parsed, err := jwt.ParseWithClaims(token, &MyClaims{}, func(t *jwt.Token) (interface{}, error) {
		return Jwtkey, nil
	})
	// BUG FIX: the parse error was previously ignored; a malformed token can
	// yield a nil *jwt.Token, and dereferencing parsed.Claims then panicked.
	if err != nil || parsed == nil {
		return nil, utils.ERROR
	}
	if claims, ok := parsed.Claims.(*MyClaims); ok && parsed.Valid {
		return claims, utils.SUCCESS
	}
	return nil, utils.ERROR
}
// VerifyJwtToken is a gin middleware that guards routes with JWT auth. It
// reads the token from the "authorization" header, rejects missing, invalid
// or expired tokens with a JSON error body and aborts the handler chain;
// otherwise it stores the authenticated username in the context and calls
// the next handler.
func VerifyJwtToken() gin.HandlerFunc {
	return func(c *gin.Context) {
		// reject writes the error response and stops the handler chain.
		reject := func(code int) {
			c.JSON(http.StatusOK, gin.H{
				"code":    code,
				"message": utils.GetErrMsg(code),
				"data":    "",
			})
			c.Abort()
		}

		token := c.Request.Header.Get("authorization")
		if token == "" {
			// BUG FIX: previously fell through and tried to validate the
			// empty token (and never aborted); reject immediately instead.
			reject(utils.USER_TOKEN_NOT_EXIST)
			return
		}

		key, checkCode := CheckJwtToken(token)
		if checkCode == utils.ERROR {
			// BUG FIX: must return here — key is nil on failure and the
			// expiry check below would dereference it and panic.
			reject(utils.USER_TOKEN_VERIFY_ERROR)
			return
		}

		if time.Now().Unix() > key.ExpiresAt {
			reject(utils.USER_TOKEN_EXPIRED)
			return
		}

		// Token accepted: expose the username to downstream handlers.
		c.Set("username", key.Username)
		c.Next()
	}
}
|
test-mile/setu | testmile-setu/setu/dispatcher/testsession.py | <reponame>test-mile/setu
from .guiautomator_dispatcher import GuiAutomatorDispatcher
class TestSessionDispatcher:
    """Factory for the dispatcher objects used within a single test session."""

    def create_gui_automator_dispatcher(self, config, setu_id):
        # Builds a dispatcher for the GUI automator identified by setu_id.
        return GuiAutomatorDispatcher(config, setu_id)

    def create_gui_element_dispatcher(self, atuomator_dispatcher, element_setu_id):
        # Delegates element-dispatcher creation to the automator dispatcher.
        # NOTE(review): "atuomator_dispatcher" (parameter) and
        # "create_gui_element_dispatacher" (callee) both look misspelled;
        # the callee name must match GuiAutomatorDispatcher's API, so
        # confirm on that class before renaming either.
        return atuomator_dispatcher.create_gui_element_dispatacher(element_setu_id)
|
democat3457/BetterWithAddons | src/main/java/betterwithaddons/interaction/InteractionBTWTweak.java | <filename>src/main/java/betterwithaddons/interaction/InteractionBTWTweak.java
package betterwithaddons.interaction;
import betterwithaddons.BetterWithAddons;
import betterwithaddons.block.ModBlocks;
import betterwithaddons.crafting.conditions.ConditionModule;
import betterwithaddons.crafting.recipes.DoorSawRecipe;
import betterwithaddons.handler.BurnHandler;
import betterwithaddons.handler.EggIncubationHandler;
import betterwithaddons.handler.SoapHandler;
import betterwithaddons.item.ModItems;
import betterwithaddons.lib.Reference;
import betterwithmods.common.BWMBlocks;
import betterwithmods.common.BWRegistry;
import betterwithmods.common.blocks.BlockAesthetic;
import betterwithmods.common.blocks.mechanical.BlockMechMachines;
import betterwithmods.common.items.ItemMaterial;
import betterwithmods.common.registry.block.recipe.BlockDropIngredient;
import betterwithmods.module.gameplay.miniblocks.MiniBlocks;
import betterwithmods.module.hardcore.world.HCBonemeal;
import betterwithmods.module.tweaks.MineshaftGeneration;
import betterwithmods.util.StackIngredient;
import com.google.common.collect.Lists;
import net.minecraft.block.BlockPlanks;
import net.minecraft.block.material.Material;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.EnumDyeColor;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.FurnaceRecipes;
import net.minecraft.item.crafting.IRecipe;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.RegistryEvent;
import net.minecraftforge.fml.common.registry.GameRegistry;
import net.minecraftforge.oredict.OreDictionary;
import net.minecraftforge.oredict.ShapelessOreRecipe;
import net.minecraftforge.registries.ForgeRegistry;
import java.util.Arrays;
import java.util.List;
/**
 * Interaction module adding tweaks on top of Better With Mods (BTW).
 * Fields below mirror the config options loaded in {@code setupConfig()}.
 */
public class InteractionBTWTweak extends Interaction {
    // Master switch for the whole module.
    public static boolean ENABLED = true;
    public static boolean SAW_RECYCLING = true;
    //public static boolean KILN_DOUBLING = true;
    public static boolean EGG_INCUBATION = true;
    public static boolean SLIPPERY_WHEN_WET = true;
    public static boolean ASH_FERTILIZER = true;
    public static boolean WOOL_RECYCLING = true;
    public static boolean LOGS_SMELT_TO_ASH = true;
    public static boolean LOGS_BURN_TO_ASH = true;
    public static boolean REPLACE_WRITABLE_BOOK_RECIPE = true;
    public static boolean RUSTY_MINESHAFTS = true;
    public static boolean INFESTED_MINESHAFTS = true;
    public static boolean BONEMEAL_TO_GLUE = false; //Disabled by default since it conflicts with Kibble
    // XP level cost for renaming items / creating nametags.
    public static int WRITING_TABLE_COST = 1;
    // Incubation duration in ticks.
    public static int EGG_INCUBATION_TIME = 5400;
    /** @return the config category / translation key for this module. */
    @Override
    protected String getName() {
        return "addons.BTWTweak";
    }
    /**
     * Loads every config option for this module into the static fields above.
     * Options inside {@code doesNotNeedRestart} may change at runtime.
     */
    @Override
    void setupConfig() {
        ENABLED = loadPropBool("Enabled","Whether the BTWTweak module is on. DISABLING THIS WILL DISABLE THE WHOLE MODULE.",ENABLED);
        SAW_RECYCLING = loadPropBool("SawRecycling","Many wooden blocks can be recycled by putting them infront of a saw, at a bit of a loss.",SAW_RECYCLING);
        EGG_INCUBATION = loadPropBool("EggIncubation","Allows eggs to be incubated into chicken by placing them on a Block of Padding with a lit Light Block above.",EGG_INCUBATION);
        SLIPPERY_WHEN_WET = loadPropBool("SlipperyWhenWet","Water running over blocks of soap will make them slippery.",SLIPPERY_WHEN_WET);
        ASH_FERTILIZER = loadPropBool("AshFertilizer","Potash is a valid fertilizer.",ASH_FERTILIZER);
        WOOL_RECYCLING = loadPropBool("WoolRecycling","Wool can be rendered back into it's components. You might want to disable this if you use mods that violate Hardcore Shearing.",WOOL_RECYCLING);
        LOGS_SMELT_TO_ASH = loadPropBool("LogsSmeltToAsh","Logs burn into ash in a furnace. This only works if they wouldn't burn into anything else.",LOGS_SMELT_TO_ASH);
        LOGS_BURN_TO_ASH = loadPropBool("LogsBurnToAsh","Logs burn into ash in world.",LOGS_BURN_TO_ASH);
        REPLACE_WRITABLE_BOOK_RECIPE = loadPropBool("ReplaceWritableBookRecipe","Changes writable books to require the Ink and Quill item.",REPLACE_WRITABLE_BOOK_RECIPE);
        RUSTY_MINESHAFTS = loadPropBool("RustedMineshafts","Rails in Mineshafts are rusted and melt down into much less iron.",RUSTY_MINESHAFTS);
        INFESTED_MINESHAFTS = loadPropBool("InfestedMineshafts","Logs in Mineshafts are infested by Termites and crumble into sawdust when harvested.",INFESTED_MINESHAFTS);
        BONEMEAL_TO_GLUE = loadPropBool("BonemealToGlue","Bonemeal can be turned to glue in a stoked Crucible.",BONEMEAL_TO_GLUE);
        // These two are hot-reloadable; everything above requires a restart.
        doesNotNeedRestart(() -> {
            WRITING_TABLE_COST = loadPropInt("WritingTableCost","How many levels it costs to rename an item or create a nametag.",WRITING_TABLE_COST);
            EGG_INCUBATION_TIME = loadPropInt("EggIncubationTime","How long it takes for an egg to hatch using incubation, in ticks.",EGG_INCUBATION_TIME);
        });
    }
    /** @return whether this module is enabled in the config. */
    @Override
    public boolean isActive() {
        return ENABLED;
    }
    /** Toggles the module, keeping the static flag and base state in sync. */
    @Override
    public void setEnabled(boolean active) {
        ENABLED = active;
        super.setEnabled(active);
    }
@Override
public List<Interaction> getDependencies() {
return Arrays.asList(new Interaction[]{ ModInteractions.bwm });
}
    /**
     * @return null, meaning no incompatible modules.
     * NOTE(review): callers appear to expect null here rather than an empty
     * list — confirm on Interaction before changing.
     */
    @Override
    public List<Interaction> getIncompatibilities() {
        return null;
    }
    /**
     * Pre-init phase: registers event handlers and removes vanilla recipes
     * according to the enabled config options.
     */
    @Override
    public void preInit() {
        if(SLIPPERY_WHEN_WET)
            MinecraftForge.EVENT_BUS.register(new SoapHandler(BWMBlocks.AESTHETIC.getDefaultState().withProperty(BlockAesthetic.TYPE, BlockAesthetic.EnumType.SOAP)));
        if(EGG_INCUBATION)
            MinecraftForge.EVENT_BUS.register(new EggIncubationHandler());
        if(REPLACE_WRITABLE_BOOK_RECIPE)
            BetterWithAddons.removeCraftingRecipe(new ResourceLocation("minecraft","writable_book"));
        if(LOGS_BURN_TO_ASH)
            MinecraftForge.EVENT_BUS.register(BurnHandler.class);
        // Exposes this option to the recipe-condition system (JSON recipes).
        ConditionModule.MODULES.put("ReplaceWritableBookRecipe",() -> REPLACE_WRITABLE_BOOK_RECIPE);
    }
    /** Registers extra crafting recipes during the recipe registry event. */
    @Override
    void modifyRecipes(RegistryEvent.Register<IRecipe> event) {
        ForgeRegistry<IRecipe> registry = (ForgeRegistry<IRecipe>) event.getRegistry();
        //Temp conversion recipe
        // Converts the deprecated material item into the standalone Ink and Quill.
        ResourceLocation resloc = new ResourceLocation(Reference.MOD_ID, "ink_and_quill_conversion");
        registry.register(new ShapelessOreRecipe(resloc,new ItemStack(ModItems.INK_AND_QUILL),new Object[]{ModItems.MATERIAL_TWEAK.getMaterial("ink_and_quill")}).setRegistryName(resloc));
    }
@Override
public void init() {
if(ASH_FERTILIZER) {
StackIngredient.mergeStacked(Lists.newArrayList(HCBonemeal.FERTILIZERS, StackIngredient.fromOre("dustPotash"), StackIngredient.fromOre("dustAsh")));
}
ModItems.MATERIAL_TWEAK.setDisabled("ink_and_quill"); //Deprecated
if(RUSTY_MINESHAFTS)
MineshaftGeneration.rail = piece -> ModBlocks.RUSTY_RAIL.getDefaultState();
if(INFESTED_MINESHAFTS)
MineshaftGeneration.supports = piece -> ModBlocks.TERMITE_LOG.getDefaultState();
BWRegistry.CRUCIBLE.addStokedRecipe(new ItemStack(ModBlocks.RUSTY_RAIL,2),new ItemStack(Items.IRON_NUGGET));
BWRegistry.WOOD_SAW.addRecipe(new ItemStack(ModBlocks.TERMITE_LOG,1,OreDictionary.WILDCARD_VALUE), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.SAWDUST,2));
if(WOOL_RECYCLING && InteractionBWM.HARDCORE_SHEARING)
{
for (EnumDyeColor color: EnumDyeColor.values()) {
BWRegistry.CAULDRON.addStokedRecipe(new ItemStack(Blocks.WOOL,1,color.getMetadata()), new ItemStack(BWMBlocks.AESTHETIC,1,BlockAesthetic.EnumType.WICKER.getMeta()), ModItems.WOOL.getByColor(color,4)).setPriority(-1);
}
}
if(BONEMEAL_TO_GLUE) {
BWRegistry.CAULDRON.addStokedRecipe(StackIngredient.fromStacks(new ItemStack(Items.DYE,64,EnumDyeColor.WHITE.getDyeDamage())),ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.GLUE));
}
if(SAW_RECYCLING) {
BWRegistry.WOOD_SAW.addRecipe(new ItemStack(Blocks.BOOKSHELF), Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,4),new ItemStack(Items.BOOK,3)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.CHEST)),getSiding(BlockPlanks.EnumType.OAK,6));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.JUKEBOX)),Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,6),new ItemStack(Items.DIAMOND,1)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.LADDER)),new ItemStack(Items.STICK,2));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.NOTEBLOCK)),Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,6),new ItemStack(Items.REDSTONE,1)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.TRAPDOOR)),getSiding(BlockPlanks.EnumType.OAK,2));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(BWMBlocks.WOODEN_AXLE)),Lists.newArrayList(getCorner(BlockPlanks.EnumType.OAK,2),new ItemStack(BWMBlocks.ROPE,1)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(BWMBlocks.BELLOWS)),Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,2), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.LEATHER_BELT), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.GEAR), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.TANNED_LEATHER_CUT,3)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(BWMBlocks.WOODEN_GEARBOX)),Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,3), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.GEAR,3), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.REDSTONE_LATCH)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(BlockMechMachines.getStack(BlockMechMachines.EnumType.HOPPER)),Lists.newArrayList(getMoulding(BlockPlanks.EnumType.OAK,3), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.GEAR,1), new ItemStack(Blocks.WOODEN_PRESSURE_PLATE,1)));
BWRegistry.WOOD_SAW.addRecipe(new ItemStack(BWMBlocks.PLATFORM),Lists.newArrayList(getMoulding(BlockPlanks.EnumType.OAK,3), new ItemStack(BWMBlocks.WICKER,2)));
BWRegistry.WOOD_SAW.addRecipe(BlockMechMachines.getStack(BlockMechMachines.EnumType.PULLEY),Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,3), new ItemStack(Items.IRON_INGOT), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.GEAR), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.REDSTONE_LATCH)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(BWMBlocks.SAW)),Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,1), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.LEATHER_BELT), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.GEAR), new ItemStack(Items.IRON_INGOT, 2)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(BWMBlocks.PUMP)),Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,3), ItemMaterial.getMaterial(ItemMaterial.EnumMaterial.SCREW), new ItemStack(BWMBlocks.GRATE, 1, BlockPlanks.EnumType.OAK.getMetadata())));
BlockPlanks.EnumType[] woodtypes = BlockPlanks.EnumType.values();
for (BlockPlanks.EnumType woodtype : woodtypes) {
BWRegistry.WOOD_SAW.addRecipe(new ItemStack(BWMBlocks.WOOD_BENCH, 1, woodtype.getMetadata()), getCorner(woodtype,2));
BWRegistry.WOOD_SAW.addRecipe(new ItemStack(BWMBlocks.WOOD_TABLE, 1, woodtype.getMetadata()), getCorner(woodtype,3));
}
BWRegistry.WOOD_SAW.addRecipe(new DoorSawRecipe(Blocks.OAK_DOOR,Lists.newArrayList(getSiding(BlockPlanks.EnumType.OAK,4)),new ItemStack(Items.OAK_DOOR)));
BWRegistry.WOOD_SAW.addRecipe(new DoorSawRecipe(Blocks.BIRCH_DOOR,Lists.newArrayList(getSiding(BlockPlanks.EnumType.BIRCH,4)),new ItemStack(Items.BIRCH_DOOR)));
BWRegistry.WOOD_SAW.addRecipe(new DoorSawRecipe(Blocks.SPRUCE_DOOR,Lists.newArrayList(getSiding(BlockPlanks.EnumType.SPRUCE,4)),new ItemStack(Items.SPRUCE_DOOR)));
BWRegistry.WOOD_SAW.addRecipe(new DoorSawRecipe(Blocks.JUNGLE_DOOR,Lists.newArrayList(getSiding(BlockPlanks.EnumType.JUNGLE,4)),new ItemStack(Items.JUNGLE_DOOR)));
BWRegistry.WOOD_SAW.addRecipe(new DoorSawRecipe(Blocks.ACACIA_DOOR,Lists.newArrayList(getSiding(BlockPlanks.EnumType.ACACIA,4)),new ItemStack(Items.ACACIA_DOOR)));
BWRegistry.WOOD_SAW.addRecipe(new DoorSawRecipe(Blocks.DARK_OAK_DOOR,Lists.newArrayList(getSiding(BlockPlanks.EnumType.DARK_OAK,4)),new ItemStack(Items.DARK_OAK_DOOR)));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.OAK_FENCE_GATE)),getMoulding(BlockPlanks.EnumType.OAK,3));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.BIRCH_FENCE_GATE)),getMoulding(BlockPlanks.EnumType.BIRCH,3));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.SPRUCE_FENCE_GATE)),getMoulding(BlockPlanks.EnumType.SPRUCE,3));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.JUNGLE_FENCE_GATE)),getMoulding(BlockPlanks.EnumType.JUNGLE,3));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.ACACIA_FENCE_GATE)),getMoulding(BlockPlanks.EnumType.ACACIA,3));
BWRegistry.WOOD_SAW.addRecipe(new BlockDropIngredient(new ItemStack(Blocks.DARK_OAK_FENCE_GATE)),getMoulding(BlockPlanks.EnumType.DARK_OAK,3));
}
}
    /** Builds a stack of {@code count} siding miniblocks for the given plank type. */
    private ItemStack getSiding(BlockPlanks.EnumType type, int count) {
        return MiniBlocks.fromParent(MiniBlocks.SIDINGS.get(Material.WOOD),Blocks.PLANKS.getDefaultState().withProperty(BlockPlanks.VARIANT,type),count);
    }
    /** Builds a stack of {@code count} moulding miniblocks for the given plank type. */
    private ItemStack getMoulding(BlockPlanks.EnumType type, int count) {
        return MiniBlocks.fromParent(MiniBlocks.MOULDINGS.get(Material.WOOD),Blocks.PLANKS.getDefaultState().withProperty(BlockPlanks.VARIANT,type),count);
    }
    /** Builds a stack of {@code count} corner miniblocks for the given plank type. */
    private ItemStack getCorner(BlockPlanks.EnumType type, int count) {
        return MiniBlocks.fromParent(MiniBlocks.CORNERS.get(Material.WOOD),Blocks.PLANKS.getDefaultState().withProperty(BlockPlanks.VARIANT,type),count);
    }
    /**
     * Post-init phase: adds log-to-ash smelting for any log that has no
     * smelting result yet (runs last so other mods' recipes win).
     */
    @Override
    public void postInit() {
        if(LOGS_SMELT_TO_ASH) {
            for (ItemStack log : OreDictionary.getOres("logWood")) {
                ItemStack result = FurnaceRecipes.instance().getSmeltingResult(log);
                if(result.isEmpty())
                    GameRegistry.addSmelting(log,ModItems.MATERIAL_TWEAK.getMaterial("ash"),0.1f);
            }
        }
        /*if(KILN_DOUBLING && ModuleLoader.isFeatureEnabled(KilnSmelting.class))
        {
            for (ItemStack ore : BWOreDictionary.oreNames) {
                if(ore.getItem() instanceof ItemBlock)
                {
                    BlockMetaRecipe recipe = KilnManager.INSTANCE.getRecipe(ore);
                    List<ItemStack> outputs = recipe.getOutputs();
                    if(outputs.size() > 0)
                    {
                        ItemStack output = outputs.get(0).copy();
                        output.setCount(MathHelper.clamp(output.getCount() * 2,0,output.getMaxStackSize()));
                        outputs.set(0,output);
                    }
                }
            }
        }*/
    }
}
|
Xenuzever/sit-wt-all | sit-wt-util/src/main/java/io/sitoolkit/wt/util/infra/util/OverwriteChecker.java |
package io.sitoolkit.wt.util.infra.util;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import javafx.application.Platform;
import javafx.scene.control.ChoiceDialog;
/**
 * Asks the user whether existing files may be overwritten, remembering
 * "apply to all subsequent files" answers. Confirmation dialogs are always
 * shown on the JavaFX application thread.
 */
public class OverwriteChecker {

    // Sticky answer for all subsequent files; NA means "ask each time".
    private Writable allWritable = Writable.NA;

    // In rebuild mode every file may be overwritten without asking.
    private boolean rebuild;

    /**
     * Returns true if the given path may be written. Missing files and
     * rebuild mode are always writable; otherwise a remembered "all"
     * answer or a fresh confirmation dialog decides.
     */
    public boolean isWritable(Path path) {
        if (!Files.exists(path)) {
            return true;
        }
        if (rebuild) {
            return true;
        }
        if (Writable.No.equals(allWritable)) {
            return false;
        } else if (Writable.Yes.equals(allWritable)) {
            return true;
        }
        Answer answer = confirmOverwriteInFxApplicationThread(path);
        allWritable = answer.allWritable;
        return Writable.Yes.equals(answer.writable);
    }

    // Shows the dialog on the FX application thread, blocking the calling
    // worker thread until the user answers.
    private Answer confirmOverwriteInFxApplicationThread(Path path) {
        if (Platform.isFxApplicationThread()) {
            return confirmOverwrite(path);
        }
        FutureTask<Answer> task = new FutureTask<>(() -> {
            return confirmOverwrite(path);
        });
        Platform.runLater(task);
        try {
            return task.get();
        } catch (InterruptedException | ExecutionException e) {
            throw new RuntimeException(e);
        }
    }

    // Presents the choice dialog; dismissing it defaults to "do not overwrite".
    private Answer confirmOverwrite(Path path) {
        ChoiceDialog<Answer> choice = new ChoiceDialog<>(Answer.n, Answer.values());
        choice.setHeaderText("SIToolkit ファイル上書き確認");
        choice.setContentText("書込み先にファイルが存在します。\n" + path.toAbsolutePath());
        return choice.showAndWait().orElse(Answer.n);
    }

    enum Writable {
        Yes, No, NA
    }

    /** User answers: per-file decision plus the sticky "all" decision. */
    enum Answer {
        //@formatter:off
        y("上書き", Writable.Yes, Writable.NA),
        a("以降全て上書き", Writable.Yes, Writable.Yes),
        n("上書きしない", Writable.No, Writable.NA),
        // BUG FIX: "q" means "do not overwrite any subsequent files", so it
        // must latch allWritable to No. It previously left NA, which caused
        // the confirmation dialog to reappear for every remaining file.
        q("以降全て上書きしない", Writable.No, Writable.No),
        ;
        //@formatter:on

        final String description;
        final Writable writable;
        final Writable allWritable;

        private Answer(String description, Writable writable, Writable allWritable) {
            this.description = description;
            this.writable = writable;
            this.allWritable = allWritable;
        }

        @Override
        public String toString() {
            return description;
        }
    }

    public boolean isRebuild() {
        return rebuild;
    }

    public void setRebuild(boolean rebuild) {
        this.rebuild = rebuild;
    }
}
|
prathamesh-sonpatki/jruby | bench/yarv/bm_loop_times.rb | <filename>bench/yarv/bm_loop_times.rb
# YARV benchmark: measures bare block-dispatch overhead over 30M iterations.
30000000.times{|e|}
|
Twinparadox/AlgorithmProblem | Baekjoon/2056.cpp | #include <algorithm>
#include <iostream>
#include <queue>
#include <vector>
using namespace std;
int main(void)
{
cin.tie(0); cout.tie(0); ios_base::sync_with_stdio(false);
int N, ans = 0;
vector<vector<int> > adj;
vector<int> ind, arr, dp;
queue<int> q;
cin >> N;
adj.resize(N + 1);
ind.resize(N + 1, 0);
arr.resize(N + 1, 0);
dp.resize(N + 1, 0);
int K, X, needs;
for (int i = 1; i <= N; i++)
{
cin >> K >> X;
arr[i] = K;
for (int j = 0; j < X; j++)
{
cin >> needs;
adj[needs].push_back(i);
ind[i]++;
}
}
for (int i = 1; i <= N; i++)
{
if (!ind[i])
{
q.push(i);
dp[i] = arr[i];
}
}
while (!q.empty())
{
int cur = q.front();
q.pop();
int size = adj[cur].size(), next;
for (int i = 0; i < size; i++)
{
next = adj[cur][i];
dp[next] = max(dp[next], dp[cur] + arr[next]);
if (--ind[next] == 0)
q.push(next);
}
}
for (int i = 1; i <= N; i++)
ans = max(ans, dp[i]);
cout << ans;
} |
qafedev/qafe-platform | qafe-web-gwt/src/main/java/com/qualogy/qafe/gwt/client/component/QMultiWordSuggestion.java | /**
* Copyright 2008-2017 Qualogy Solutions B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.qualogy.qafe.gwt.client.component;
import com.google.gwt.user.client.ui.MultiWordSuggestOracle.MultiWordSuggestion;
import com.qualogy.qafe.gwt.client.vo.functions.DataContainerGVO;
/**
 * A {@link MultiWordSuggestion} that carries the backing data row, so a
 * selected suggestion can be resolved back to its originating record.
 */
public class QMultiWordSuggestion extends MultiWordSuggestion {

    // Name of the data field used for the suggestion text by default.
    public static final String DISPLAYFIELD_DEFAULT = "value";

    // The data row this suggestion was built from.
    private DataContainerGVO data = null;

    /** @return the data row backing this suggestion. */
    public DataContainerGVO getData() {
        return data;
    }

    public QMultiWordSuggestion(DataContainerGVO data) {
        this(data, DISPLAYFIELD_DEFAULT);
    }

    /**
     * Builds a suggestion whose display and replacement strings both come
     * from the given field of the data row.
     * NOTE(review): throws NullPointerException when displayField is absent
     * from the data map — confirm callers always pass a present field.
     */
    public QMultiWordSuggestion(DataContainerGVO data, String displayField) {
        super(data.getDataMap().get(displayField).toString(), data.getDataMap().get(displayField).toString());
        this.data = data;
    }
}
|
nexus-devs/cubic | test/requests.js | const assert = require('assert')
const Client = require(`${process.cwd()}/packages/client`)
const Endpoint = require('cubic-api/endpoint')
/**
* Tests for properly responding to usual requests.
*/
describe('Requests', function () {
  let client, uiClient, options

  before(async function () {
    // One API client plus one client pointed at the UI node's websocket.
    client = new Client()
    uiClient = new Client({ api_url: 'ws://localhost:3000/ws' })
    const endpoints = cubic.nodes.api.server.ws.endpoints
    const db = this.db = (await endpoints.db).db(endpoints.config.mongoDb)
    // Shared options for constructing ad-hoc Endpoint instances in tests.
    options = { db, cache: endpoints.cache, ws: endpoints.ws }
    await client.awaitConnection()
  })

  // GET check
  it('should respond with "bar" on GET /foo', async function () {
    const res = await client.get('/foo')
    assert(res === 'bar')
  })

  // POST check on same URL
  it('should respond with "foo" on POST /foo', async function () {
    const payload = 'foo'
    const res = await client.post('/foo', payload)
    assert(res === payload)
  })

  // Raw file check
  it('should send buffer of guy fieri on GET /guy-fieri.jpg', async function () {
    const guy = await uiClient.get('/guy-fieri.jpg')
    // Serialization may deliver either a raw Buffer or its JSON form.
    assert(guy.type === 'Buffer' || guy instanceof Buffer)
  })

  // Pub/Sub
  it('should emit event with "foo" on /test when published.', function (done) {
    const endpoint = new Endpoint({ ...options, ...{ url: '/test' } })
    client.subscribe('/test', foo => {
      assert(foo === 'foo')
      done()
    })
    // Give subscribe request enough time to arrive first. Hardcoding it like this
    // because something is probably terribly wrong if it still didn't subscribe
    // after a full second.
    setTimeout(() => endpoint.publish('foo'), 1000)
  })

  it('should correctly process multiple parallel requests', async function () {
    const parallel = []
    for (let i = 0; i < 10; i++) parallel.push(client.get('/foo'))
    const res = await Promise.all(parallel)
    // Every one of the ten concurrent GETs must have resolved to 'bar'.
    assert(res.filter(r => r !== 'bar').length === 0)
  })
})
|
avelez93/tfx | tfx/orchestration/portable/importer_node_handler_test.py | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.portable.importer_node_handler."""
import os
import tensorflow as tf
from tfx import version as tfx_version
from tfx.dsl.compiler import constants
from tfx.orchestration import metadata
from tfx.orchestration.portable import importer_node_handler
from tfx.orchestration.portable import runtime_parameter_utils
from tfx.proto.orchestration import pipeline_pb2
from tfx.utils import test_case_utils
class ImporterNodeHandlerTest(test_case_utils.TfxTest):
  """Tests ImporterNodeHandler.run() against an in-memory (SQLite) MLMD store.

  Both tests run the importer node twice and inspect the artifacts and
  executions recorded in MLMD, covering the reimport-enabled and
  reimport-disabled code paths.
  """

  def setUp(self):
    """Builds an MLMD connection and loads the test pipeline IR."""
    super().setUp()
    pipeline_root = os.path.join(
        os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
        self.id())
    # Makes sure multiple connections within a test always connect to the same
    # MLMD instance.
    metadata_path = os.path.join(pipeline_root, 'metadata', 'metadata.db')
    connection_config = metadata.sqlite_metadata_connection_config(
        metadata_path)
    connection_config.sqlite.SetInParent()
    self._mlmd_connection = metadata.Metadata(
        connection_config=connection_config)
    self._testdata_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    # Sets up pipelines
    pipeline = pipeline_pb2.Pipeline()
    self.load_proto_from_text(
        os.path.join(
            os.path.dirname(__file__), 'testdata',
            'pipeline_for_launcher_test.pbtxt'), pipeline)
    self._pipeline_info = pipeline.pipeline_info
    self._pipeline_runtime_spec = pipeline.runtime_spec
    runtime_parameter_utils.substitute_runtime_parameter(
        pipeline, {
            constants.PIPELINE_RUN_ID_PARAMETER_NAME: 'my_pipeline_run',
        })
    # Extracts components
    # NOTE(review): node index 3 is assumed to be the importer node in
    # pipeline_for_launcher_test.pbtxt — verify against the testdata file.
    self._importer = pipeline.nodes[3].pipeline_node
    # Fake tfx_version for tests.
    tfx_version.__version__ = '0.123.4.dev'

  def testLauncher_importer_mode_reimport_enabled(self):
    """Running the importer twice with reimport=1 creates a new artifact each time."""
    handler = importer_node_handler.ImporterNodeHandler()
    execution_info = handler.run(
        mlmd_connection=self._mlmd_connection,
        pipeline_node=self._importer,
        pipeline_info=self._pipeline_info,
        pipeline_runtime_spec=self._pipeline_runtime_spec)
    with self._mlmd_connection as m:
      # First run: exactly one Schema artifact is registered as LIVE.
      [artifact] = m.store.get_artifacts_by_type('Schema')
      self.assertProtoPartiallyEquals(
          """
          id: 1
          uri: "my_url"
          custom_properties {
            key: "int_custom_property"
            value {
              int_value: 123
            }
          }
          custom_properties {
            key: "str_custom_property"
            value {
              string_value: "abc"
            }
          }
          custom_properties {
            key: "tfx_version"
            value {
              string_value: "0.123.4.dev"
            }
          }
          state: LIVE""",
          artifact,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch'
          ])
      [execution] = m.store.get_executions_by_id([execution_info.execution_id])
      self.assertProtoPartiallyEquals(
          """
          id: 1
          last_known_state: COMPLETE
          custom_properties {
            key: "artifact_uri"
            value {
              string_value: "my_url"
            }
          }
          custom_properties {
            key: "reimport"
            value {
              int_value: 1
            }
          }
          """,
          execution,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch'
          ])
    # Second run with reimport enabled: a brand-new artifact (id 2) and a new
    # COMPLETE execution are expected.
    execution_info = handler.run(
        mlmd_connection=self._mlmd_connection,
        pipeline_node=self._importer,
        pipeline_info=self._pipeline_info,
        pipeline_runtime_spec=self._pipeline_runtime_spec)
    with self._mlmd_connection as m:
      new_artifact = m.store.get_artifacts_by_type('Schema')[1]
      self.assertProtoPartiallyEquals(
          """
          id: 2
          uri: "my_url"
          custom_properties {
            key: "int_custom_property"
            value {
              int_value: 123
            }
          }
          custom_properties {
            key: "str_custom_property"
            value {
              string_value: "abc"
            }
          }
          custom_properties {
            key: "tfx_version"
            value {
              string_value: "0.123.4.dev"
            }
          }
          state: LIVE""",
          new_artifact,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch'
          ])
      [execution] = m.store.get_executions_by_id([execution_info.execution_id])
      self.assertProtoPartiallyEquals(
          """
          id: 2
          last_known_state: COMPLETE
          custom_properties {
            key: "artifact_uri"
            value {
              string_value: "my_url"
            }
          }
          custom_properties {
            key: "reimport"
            value {
              int_value: 1
            }
          }
          """,
          execution,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch'
          ])

  def testLauncher_importer_mode_reimport_disabled(self):
    """With reimport=0 the second run reuses the artifact and is marked CACHED."""
    # Flip the node's 'reimport' parameter off before running.
    self._importer.parameters.parameters['reimport'].field_value.int_value = 0
    handler = importer_node_handler.ImporterNodeHandler()
    execution_info = handler.run(
        mlmd_connection=self._mlmd_connection,
        pipeline_node=self._importer,
        pipeline_info=self._pipeline_info,
        pipeline_runtime_spec=self._pipeline_runtime_spec)
    with self._mlmd_connection as m:
      [artifact] = m.store.get_artifacts_by_type('Schema')
      self.assertProtoPartiallyEquals(
          """
          id: 1
          uri: "my_url"
          custom_properties {
            key: "int_custom_property"
            value {
              int_value: 123
            }
          }
          custom_properties {
            key: "str_custom_property"
            value {
              string_value: "abc"
            }
          }
          custom_properties {
            key: "tfx_version"
            value {
              string_value: "0.123.4.dev"
            }
          }
          state: LIVE""",
          artifact,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch'
          ])
      [execution] = m.store.get_executions_by_id([execution_info.execution_id])
      self.assertProtoPartiallyEquals(
          """
          id: 1
          last_known_state: COMPLETE
          custom_properties {
            key: "artifact_uri"
            value {
              string_value: "my_url"
            }
          }
          custom_properties {
            key: "reimport"
            value {
              int_value: 0
            }
          }
          """,
          execution,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch'
          ])
    # Run the 2nd execution. Since the reimport is disabled, no new schema
    # is imported and the corresponding execution is published as CACHED.
    execution_info = handler.run(
        mlmd_connection=self._mlmd_connection,
        pipeline_node=self._importer,
        pipeline_info=self._pipeline_info,
        pipeline_runtime_spec=self._pipeline_runtime_spec)
    with self._mlmd_connection as m:
      # No new Schema is produced.
      self.assertLen(m.store.get_artifacts_by_type('Schema'), 1)
      [execution] = m.store.get_executions_by_id([execution_info.execution_id])
      self.assertProtoPartiallyEquals(
          """
          id: 2
          last_known_state: CACHED
          custom_properties {
            key: "artifact_uri"
            value {
              string_value: "my_url"
            }
          }
          custom_properties {
            key: "reimport"
            value {
              int_value: 0
            }
          }
          """,
          execution,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch'
          ])
# Allow running this test module directly (outside the bazel/pytest runner).
if __name__ == '__main__':
  tf.test.main()
|
snusnu/merb_resource_controller | spec/mrc_test_app/spec/spec_helper.rb | $:.push File.join(File.dirname(__FILE__), '..', 'lib')
require "rubygems"
# Add the local gems dir if found within the app root; any dependencies loaded
# hereafter will try to load from the local gems before loading system gems.
if (local_gem_dir = File.join(File.dirname(__FILE__), '..', 'gems')) && $BUNDLE.nil?
$BUNDLE = true; Gem.clear_paths; Gem.path.unshift(local_gem_dir)
end
require "merb-core"
require "spec" # Satisfies Autotest and anyone else not using the Rake tasks
# this loads all plugins required in your init file so don't add them
# here again, Merb will do it for you
Merb.start_environment(
:merb_root => File.join(File.dirname(__FILE__), '..'),
:environment => 'test'
)
Spec::Runner.configure do |config|
config.include Merb::Test::RequestHelper
config.include(Merb::Test::ControllerHelper)
config.include(Merb::Test::ViewHelper)
config.include(Merb::Test::RouteHelper)
end
# -----------------------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------------------
# Fixture: a single Article created through the public POST route.
# Each `given` block starts from a freshly migrated (empty) database.
given "an Article exists" do
  DataMapper.auto_migrate!
  request(
    resource(:articles),
    :method => "POST",
    :params => {
      :article => {
        :id    => nil,
        :title => "article title",
        :body  => "article body"
      }
    }
  )
end
# Fixture: an Editor plus one Article belonging to that editor.
given "an Editor exists" do
  DataMapper.auto_migrate!
  Editor.create({ :id => nil, :name => "snusnu" })
  request(
    resource(:articles),
    :method => "POST",
    :params => {
      :article => {
        :id        => nil,
        :editor_id => Editor.first.id,
        :title     => "article title",
        :body      => "article body"
      }
    }
  )
end
# Fixture: one Article with one Comment attached via the top-level
# /comments route (article_id passed explicitly in the params).
given "a Comment exists" do
  DataMapper.auto_migrate!
  request(
    resource(:articles),
    :method => "POST",
    :params => {
      :article => {
        :id    => nil,
        :title => "article title",
        :body  => "article body"
      }
    }
  )
  request(
    resource(:comments),
    :method => "POST",
    :params => {
      :comment => {
        :id         => nil,
        :article_id => Article.first.id,
        :body       => "comment body"
      }
    }
  )
end
# Fixture: article -> comment -> rating, all created through the nested
# resource routes (the comment model lives in the Community namespace).
given "a Rating exists" do
  DataMapper.auto_migrate!
  request(
    resource(:articles),
    :method => "POST",
    :params => {
      :article => {
        :id    => nil,
        :title => "article title",
        :body  => "article body"
      }
    }
  )
  request(
    resource(Article.first, :comments),
    :method => "POST",
    :params => {
      :comment => {
        :id   => nil,
        :body => "comment body"
      }
    }
  )
  request(
    resource(Article.first, Community::Comment.first, :ratings),
    :method => "POST",
    :params => {
      :rating => {
        :id   => nil,
        :rate => 1
      }
    }
  )
end
# Fixture: one article, two comments, three ratings in total
# (two on the first comment, one on the last comment).
given "3 Ratings exist" do
  DataMapper.auto_migrate!
  request(
    resource(:articles),
    :method => "POST",
    :params => {
      :article => {
        :title => "article title",
        :body  => "article body"
      }
    }
  )
  2.times do
    request(
      resource(Article.first, :comments),
      :method => "POST",
      :params => {
        :comment => {
          :body => "comment body"
        }
      }
    )
  end
  2.times do
    request(
      resource(Article.first, Community::Comment.first, :ratings),
      :method => "POST",
      :params => {
        :rating => {
          :rate => 1
        }
      }
    )
  end
  request(
    resource(Article.first, Community::Comment.all.last, :ratings),
    :method => "POST",
    :params => {
      :rating => {
        :rate => 1
      }
    }
  )
end
# Fixture: two articles; two comments on the first, one on the last.
given "2 articles and 3 comments exist" do
  DataMapper.auto_migrate!
  2.times do
    request(
      resource(:articles),
      :method => "POST",
      :params => {
        :article => {
          :id    => nil,
          :title => "article title",
          :body  => "article body"
        }
      }
    )
  end
  2.times do
    request(
      resource(Article.first, :comments),
      :method => "POST",
      :params => {
        :comment => {
          :id   => nil,
          :body => "comment body"
        }
      }
    )
  end
  request(
    resource(Article.all.last, :comments),
    :method => "POST",
    :params => {
      :comment => {
        :id   => nil,
        :body => "comment body"
      }
    }
  )
end
bckrm/gianna-biscontini | web/src/components/internalLink.js | import React from 'react';
import PropTypes from 'prop-types';
// import styled from 'styled-components';
// import tw from 'twin.macro';
import { Link } from 'gatsby';
export default function InternalLink({ data }) {
const {
link: { pageTitle },
linkText,
} = data;
const link = pageTitle.toLowerCase();
return (
<Link
className="link relative text-[28px] tracking-[.195rem] uppercase"
style={{ lineHeight: '90%' }}
to={`/${link}`}
>
{linkText}
</Link>
);
}
InternalLink.propTypes = {
data: PropTypes.object.isRequired,
};
|
kle97/enrollment-system | src/main/java/io/spring/enrollmentsystem/feature/authentication/InvalidAccessTokenException.java | package io.spring.enrollmentsystem.feature.authentication;
/**
 * Thrown when a request carries an invalid or missing access token.
 *
 * <p>Unchecked so it can propagate out of authentication filters/handlers
 * without being declared on every method signature.
 */
public class InvalidAccessTokenException extends RuntimeException {

    /** Default message shared by the no-arg and cause-only constructors. */
    private static final String DEFAULT_MESSAGE =
            "Invalid or missing access token, please log in again!";

    /** Creates the exception with the default message. */
    public InvalidAccessTokenException() {
        super(DEFAULT_MESSAGE);
    }

    /**
     * Creates the exception with the default message and an underlying cause.
     *
     * @param cause the root cause (e.g. a token parsing/validation failure)
     */
    public InvalidAccessTokenException(Throwable cause) {
        super(DEFAULT_MESSAGE, cause);
    }

    /**
     * Creates the exception with a custom message.
     *
     * @param message detail message describing the token failure
     */
    public InvalidAccessTokenException(String message) {
        super(message);
    }

    /**
     * Creates the exception with a custom message and an underlying cause.
     *
     * @param message detail message describing the token failure
     * @param cause   the root cause
     */
    public InvalidAccessTokenException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
joluxer/NpfsCpp | src/Npfs/Resources/AsciiDecimalInt16VarSerDes.cpp | <gh_stars>0
/*
* AsciiDecimalInt16VarSerDes.cpp
*
* Created on: 29.09.2012
* Author: lode
*/
#include "AsciiDecimalInt16VarSerDes.h"
#include "Util/PrintfToStream.h"
#include "Util/StringBufferStream.h"
#include <assert.h>
#include <stdlib.h>
namespace Npfs
{

// Binds the serializer/deserializer to the variable it reads and writes.
AsciiDecimalInt16VarSerDes::AsciiDecimalInt16VarSerDes(VarType& myVar)
: variable(myVar)
{}

// Writes the variable as a fixed-width ("%6d") decimal line followed by '\n'
// into buffer. Returns the number of characters produced, capped at
// bufferLength.
// NOTE(review): if the underlying printf were ever to return a negative
// error code, the implicit conversion to the unsigned return type would
// yield a huge value — presumably PrintfToStream cannot fail here, but
// worth confirming.
unsigned AsciiDecimalInt16VarSerDes::serializeTo(unsigned char* buffer, unsigned bufferLength)
{
  assert(bufferLength >= DataLength_bytes);

  StringBufferStream stringBuffer(buffer, bufferLength);
  PrintfToStream fString(stringBuffer);

  auto n = fString.printf("%6d\n", variable);

  // Never report more bytes than the caller's buffer can hold.
  if (n > int(bufferLength))
    n = bufferLength;

  return n;
}

// Parses a decimal integer (strtol, base 10) from the start of buffer and
// stores it into the bound variable. Returns true only when at least one
// digit was consumed; on failure the variable is left untouched.
// NOTE(review): assumes the buffer is NUL-terminated text — strtol has no
// length limit, so callers must guarantee termination within bufferLength.
bool AsciiDecimalInt16VarSerDes::deserializeFrom(const unsigned char* buffer, unsigned bufferLength)
{
  bool success = false;
  long i;
  char* tail;

  i = strtol((const char*) buffer, &tail, 10);

  if (tail > (char*)buffer)
  {
    variable = static_cast<VarType>(i);
    success = true;
  }

  return success;
}

}
|
2bitsin/Subor | Sources/core/RicohCpuImpl.hpp | <reponame>2bitsin/Subor
#include "RicohCpu.hpp"
template <auto ... _Test, typename _Value>
constexpr bool is_in (_Value&& value)
{
return (... || (value == _Test));
}
// Performs one bus access of the requested kind against the host system `m`,
// advancing the CPU cycle counter.
//
// Dummy operations re-dispatch as real peeks/pokes whose result/target value
// is discarded, so the cycle cost is identical to a real access.
// A poke to 0x4014 does not reach the bus: it latches the DMA source page
// and arms the OAM-DMA state machine instead (serviced in stepUntil).
template <BusOperation _Operation, typename _Host, typename _Value>
inline auto RicohCPU::tick (_Host&& m, word addr, _Value&& data)
{
	byte discard = 0;
	if constexpr (_Operation == kDummyPeek)
		return tick<kPeek> (m, addr, discard);
	if constexpr (_Operation == kDummyPoke)
		return tick<kPoke> (m, addr, data);
	// Every real access costs one clock.
	++q.cnt_clock;
	if constexpr (_Operation == kPoke)
	{
		if (addr == 0x4014u)
		{
			// 0x4014 write: record the DMA source page and flag DMA start;
			// the write is intercepted and never forwarded to the host bus.
			q.rDma.h = (byte)data;
			q.mode.dmaStart = 1;
			return kSuccess;
		}
	}
	return m.template tick<_Operation> (*this, addr, data);
}
// Runs the CPU until the predicate `s(*this, m)` returns true.
//
// Each loop iteration performs exactly one of: a stall cycle, one OAM-DMA
// phase, interrupt entry (NMI/RST/IRQ), or the fetch-decode-execute of one
// instruction. Instruction handling is split into two switches on the
// opcode: the first resolves the addressing mode (computing q.addr and the
// page-cross flag), the second performs the operation. Cycle accuracy comes
// from issuing the same bus ticks (including dummy reads/writes) a real
// 6502-family CPU would.
template <typename _Host, typename _ShouldStop>
inline bool RicohCPU::stepUntil (_Host&& m, _ShouldStop&& s)
{
	byte discard;
	while (!s(*this, m))
	{
		word next = 0u;       // opcode, or pseudo-opcode 0x100..0x102 for IRQ/NMI/RST
		bool cross = false;   // set when indexed addressing crossed a page boundary
		if (q.mode.stall)
			continue;
		else if (q.mode.dmaStart)
		{
			// OAM-DMA start: one (or two, on odd cycles) alignment dummy reads.
			tick<kDummyPeek> (m, q.rDma.w, next);
			if (q.cnt_clock & 1u)
				tick<kDummyPeek> (m, q.rDma.w, next);
			q.rDma.l = 0x0u;
			q.mode.dmaStart = 0;
			q.mode.dmaCycle = 1;
			continue;
		}
		else if (q.mode.dmaCycle)
		{
			// OAM-DMA transfer: copy one byte per iteration to 0x2004
			// (presumably the PPU OAMDATA port), 256 bytes total.
			tick<kPeek> (m, q.rDma.w, next);
			tick<kPoke> (m, 0x2004u, next);
			if (!++q.rDma.l)
				q.mode.dmaCycle = 0;
			continue;
		}
		else if (std::exchange(q.mode.nmi, 0u))
			next = 0x101u;
		else if (std::exchange(q.mode.rst, 0u))
			next = 0x102u;
		else if (q.mode.irq && !q.p.i)
			next = 0x100u;
		else
		{
			if (q.mode.wait)
				continue;
			tick<kPeek> (m, q.pc.w++, next);
		}
		// First switch: addressing-mode resolution.
		switch (next)
		{
		case 0x000:// BRK
		case 0x100:// IRQ
		case 0x101:// NMI
		case 0x102:// RST
			break;
		// Implied
		case 0x40:
		case 0x60:
		case 0x08:
		case 0x18:
		case 0x28:
		case 0x38:
		case 0x48:
		case 0x58:
		case 0x68:
		case 0x78:
		case 0x88:
		case 0x98:
		case 0xA8:
		case 0xB8:
		case 0xC8:
		case 0xD8:
		case 0xE8:
		case 0xF8:
		case 0x0A:
		case 0x1A:
		case 0x2A:
		case 0x3A:
		case 0x4A:
		case 0x5A:
		case 0x6A:
		case 0x7A:
		case 0x8A:
		case 0x9A:
		case 0xAA:
		case 0xBA:
		case 0xCA:
		case 0xDA:
		case 0xEA:
		case 0xFA:
			tick<kDummyPeek> (m, q.pc.w, discard);
			break;
		// Immediate
		case 0x02:
		case 0x12:
		case 0x22:
		case 0x32:
		case 0x42:
		case 0x52:
		case 0x62:
		case 0x72:
		case 0x82:
		case 0x92:
		case 0xA2:
		case 0xB2:
		case 0xC2:
		case 0xD2:
		case 0xE2:
		case 0xF2:
		case 0x0B:
		case 0x2B:
		case 0x4B:
		case 0x6B:
		case 0x8B:
		case 0xAB:
		case 0xCB:
		case 0xEB:
		case 0x09:
		case 0x29:
		case 0x49:
		case 0x69:
		case 0x80:
		case 0x89:
		case 0xC0:
		case 0xC9:
		case 0xA0:
		case 0xA9:
		case 0xE0:
		case 0xE9:
			q.addr.w = q.pc.w++;
			break;
		// Zero Page
		case 0x04:
		case 0x05:
		case 0x06:
		case 0x07:
		case 0x24:
		case 0x25:
		case 0x26:
		case 0x27:
		case 0x44:
		case 0x45:
		case 0x46:
		case 0x47:
		case 0x64:
		case 0x65:
		case 0x66:
		case 0x67:
		case 0x84:
		case 0x85:
		case 0x86:
		case 0x87:
		case 0xA4:
		case 0xA5:
		case 0xA6:
		case 0xA7:
		case 0xC4:
		case 0xC5:
		case 0xC6:
		case 0xC7:
		case 0xE4:
		case 0xE5:
		case 0xE6:
		case 0xE7:
			tick<kPeek> (m, q.pc.w++, q.addr.w);
			break;
		// Zero Page, X
		case 0x14:
		case 0x15:
		case 0x16:
		case 0x17:
		case 0x34:
		case 0x35:
		case 0x36:
		case 0x37:
		case 0x54:
		case 0x55:
		case 0x56:
		case 0x57:
		case 0x74:
		case 0x75:
		case 0x76:
		case 0x77:
		case 0x94:
		case 0x95:
		case 0xB4:
		case 0xB5:
		case 0xD4:
		case 0xD5:
		case 0xD6:
		case 0xD7:
		case 0xF4:
		case 0xF5:
		case 0xF6:
		case 0xF7:
			tick<kPeek> (m, q.pc.w++, q.addr.w);
			// Index add wraps within the zero page (only the low byte moves).
			q.addr.l += q.x;
			tick<kDummyPeek> (m, q.addr.w, discard);
			break;
		// Zero Page, Y
		case 0x96:
		case 0x97:
		case 0xB7:
		case 0xB6:
			tick<kPeek> (m, q.pc.w++, q.addr.w);
			q.addr.l += q.y;
			tick<kDummyPeek> (m, q.addr.w, discard);
			break;
		// Absolute
		case 0x0C:
		case 0x0D:
		case 0x0E:
		case 0x0F:
		case 0x20:
		case 0x2C:
		case 0x2D:
		case 0x2E:
		case 0x2F:
		case 0x4C:
		case 0x4D:
		case 0x4E:
		case 0x4F:
		case 0x6D:
		case 0x6E:
		case 0x6F:
		case 0x8C:
		case 0x8D:
		case 0x8E:
		case 0x8F:
		case 0xAC:
		case 0xAD:
		case 0xAE:
		case 0xAF:
		case 0xCC:
		case 0xCD:
		case 0xCE:
		case 0xCF:
		case 0xEC:
		case 0xED:
		case 0xEE:
		case 0xEF:
			tick<kPeek> (m, q.pc.w++, q.addr.l);
			tick<kPeek> (m, q.pc.w++, q.addr.h);
			break;
		// Absolute, X
		case 0x1C:
		case 0x1D:
		case 0x1E:
		case 0x1F:
		case 0x3C:
		case 0x3D:
		case 0x3E:
		case 0x3F:
		case 0x5C:
		case 0x5D:
		case 0x5E:
		case 0x5F:
		case 0x7C:
		case 0x7D:
		case 0x7E:
		case 0x7F:
		case 0x9C: // SYA
		case 0x9D:
		case 0xBC:
		case 0xBD:
		case 0xDC:
		case 0xDD:
		case 0xDE:
		case 0xDF:
		case 0xFC:
		case 0xFD:
		case 0xFE:
		case 0xFF:
			tick<kPeek> (m, q.pc.w++, q.addr.l);
			tick<kPeek> (m, q.pc.w++, q.addr.h);
			q.tmp0.w = q.addr.w;
			q.addr.w += q.x;
			// Page crossed when the high byte changed after indexing.
			cross = q.addr.h != q.tmp0.h;
			break;
		// Absolute, Y
		case 0x19:
		case 0x1B:
		case 0x39:
		case 0x3B:
		case 0x59:
		case 0x5B:
		case 0x79:
		case 0x7B:
		case 0x99:
		case 0x9B:
		case 0x9E: // SXA
		case 0x9F: // AXA
		case 0xB9:
		case 0xBB:
		case 0xBE:
		case 0xBF:
		case 0xD9:
		case 0xDB:
		case 0xF9:
		case 0xFB:
			tick<kPeek> (m, q.pc.w++, q.addr.l);
			tick<kPeek> (m, q.pc.w++, q.addr.h);
			q.tmp0.w = q.addr.w;
			q.addr.w += q.y;
			cross = q.addr.h != q.tmp0.h;
			break;
		// (Indirect)
		case 0x6C:
			tick<kPeek> (m, q.pc.w++, q.addr.l);
			tick<kPeek> (m, q.pc.w++, q.addr.h);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			// Only the low byte increments: reproduces the 6502 JMP ($xxFF)
			// page-wrap quirk where the high pointer byte never carries.
			q.addr.l += 1u;
			tick<kPeek> (m, q.addr.w, q.tmp0.h);
			q.addr.w = q.tmp0.w;
			break;
		// (Indirect, X)
		case 0x01:
		case 0x03:
		case 0x21:
		case 0x23:
		case 0x41:
		case 0x43:
		case 0x61:
		case 0x63:
		case 0x81:
		case 0x83:
		case 0xA1:
		case 0xA3:
		case 0xC1:
		case 0xC3:
		case 0xE1:
		case 0xE3:
			tick<kPeek> (m, q.pc.w++, q.addr.w);
			q.addr.w += q.x;
			tick<kDummyPeek> (m, q.addr.w, discard);
			tick<kPeek> (m, q.addr.l, q.tmp0.l);
			q.addr.l += 1u;
			tick<kPeek> (m, q.addr.l, q.tmp0.h);
			q.addr.w = q.tmp0.w;
			break;
		// (Indirect), Y
		case 0x11:
		case 0x13:
		case 0x31:
		case 0x33:
		case 0x51:
		case 0x53:
		case 0x71:
		case 0x73:
		case 0x91:
		case 0x93: // AXA
		case 0xB1:
		case 0xB3:
		case 0xD1:
		case 0xD3:
		case 0xF1:
		case 0xF3:
			tick<kPeek> (m, q.pc.w++, q.addr.w);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.addr.l += 1u;
			tick<kPeek> (m, q.addr.l, q.tmp0.h);
			q.addr.w = q.tmp0.w;
			q.addr.w += q.y;
			cross = q.addr.h != q.tmp0.h;
			break;
		// Relative
		case 0x10:
		case 0x30:
		case 0x50:
		case 0x70:
		case 0x90:
		case 0xB0:
		case 0xD0:
		case 0xF0:
			tick<kPeek> (m, q.pc.w++, q.addr.w);
			// Sign-extend the 8-bit displacement and add it to PC.
			q.addr.w += q.pc.w - ((q.addr.w & 0x80) << 1u);
			break;
		default:
			//if (is_in<0x40, 0x60, 0x08, 0x18, 0x28, 0x38, 0x48, 0x58, 0x68, 0x78, 0x88, 0x98, 0xA8, 0xB8, 0xC8, 0xD8, 0xE8, 0xF8, 0x0A, 0x1A, 0x2A, 0x3A, 0x4A, 0x5A, 0x6A, 0x7A, 0x8A, 0x9A, 0xAA, 0xBA, 0xCA, 0xDA, 0xEA, 0xFA>(next))
			//{
			//  tick<kDummyPeek> (m, q.pc.w, discard);
			//}
			assert (false);
			break;
		}
		// Second switch: operation execution using the resolved q.addr/cross.
		switch (next)
		{
		case 0x000:// BRK
		case 0x100:// IRQ
		case 0x101:// NMI
		case 0x102:// RST
			// Interrupt entry: push PC and status, then load the handler
			// vector. The B flag distinguishes BRK from hardware interrupts.
			q.p.b = (next == 0x000u);
			q.p.i = (next == 0x100u);
			q.addr.w = vectors [next & 0xFF];
			tick<kDummyPeek> (m, q.pc.w, next);
			tick<kDummyPeek> (m, q.pc.w, next);
			tick<kPoke> (m, 0x100 + q.s--, q.pc.h);
			tick<kPoke> (m, 0x100 + q.s--, q.pc.l);
			tick<kPoke> (m, 0x100 + q.s--, q.p.bits);
			tick<kPeek> (m, q.addr.w, q.pc.l);
			tick<kPeek> (m, q.addr.w + 1u, q.pc.h);
			break;
		// JMP
		case 0x4C:
		case 0x6C:
			q.pc = q.addr;
			break;
		// JSR
		case 0x20: // Todo: double-check the order of stores/loads
			--q.pc.w;
			tick<kPoke> (m, 0x100 + q.s, q.addr.l);
			tick<kPoke> (m, 0x100 + q.s--, q.pc.h);
			tick<kPoke> (m, 0x100 + q.s--, q.pc.l);
			q.pc.w = q.addr.w;
			break;
		// RTS
		case 0x60:
			tick<kDummyPeek> (m, 0x100 + q.s, q.tmp0.l);
			tick<kPeek> (m, 0x100 + ++q.s, q.pc.l);
			tick<kPeek> (m, 0x100 + ++q.s, q.pc.h);
			tick<kDummyPeek> (m, q.pc.w++, q.tmp0.l);
			break;
		// RTI
		case 0x40:
			tick<kPeek> (m, 0x100 + ++q.s, q.p.bits);
			q.p.b = 0;
			q.p.e = 1;
			tick<kPeek> (m, 0x100 + ++q.s, q.pc.l);
			tick<kPeek> (m, 0x100 + ++q.s, q.pc.h);
			tick<kDummyPeek> (m, q.pc.w, q.tmp0.l);
			break;
		// LDA
		case 0xA1:
		case 0xA5:
		case 0xA9:
		case 0xAD:
		case 0xB1:
		case 0xB5:
		case 0xB9:
		case 0xBD:
			// Page crossing on indexed reads costs one extra (dummy) cycle.
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.p.z = !!(q.tmp0.l == 0u);
			q.p.n = !!(q.tmp0.l & 0x80u);
			q.a = q.tmp0.l;
			break;
		// LDX
		case 0xB6:
		case 0xA2:
		case 0xA6:
		case 0xAE:
		case 0xBE:
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.p.z = !!(q.tmp0.l == 0u);
			q.p.n = !!(q.tmp0.l & 0x80u);
			q.x = q.tmp0.l;
			break;
		// LAX
		case 0xA7: // LAX ab ;*=add 1 3
		case 0xB7: // LAX ab,Y ;if page 4
		case 0xAF: // LAX abcd ;No. Cycles= 4
		case 0xBF: // LAX abcd,Y ; 4*
		case 0xA3: // LAX (ab,X) ;boundary 6
		case 0xB3: // LAX (ab),Y ;is crossed 5*
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.p.z = !!(q.tmp0.l == 0u);
			q.p.n = !!(q.tmp0.l & 0x80u);
			q.x = q.a = q.tmp0.l;
			break;
		// LDY
		case 0xB4:
		case 0xA0:
		case 0xA4:
		case 0xAC:
		case 0xBC:
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.p.z = !!(q.tmp0.l == 0u);
			q.p.n = !!(q.tmp0.l & 0x80u);
			q.y = q.tmp0.l;
			break;
		// STX
		case 0x96:
		case 0x86:
		case 0x8E:
			tick<kPoke> (m, q.addr.w, q.x);
			break;
		// STY
		case 0x94:
		case 0x84:
		case 0x8C:
			tick<kPoke> (m, q.addr.w, q.y);
			break;
		// STA
		case 0x91:
		case 0x99:
		case 0x9D:
			// Indexed stores always pay the fix-up cycle (fall through to poke).
			tick<kDummyPeek> (m, q.addr.w, q.a);
		case 0x81:
		case 0x85:
		case 0x8D:
		case 0x95:
			tick<kPoke> (m, q.addr.w, q.a);
			break;
		// BIT
		case 0x24:
		case 0x2C:
			tick<kPeek> (m, q.addr.w, q.tmp0.w);
			q.p.z = !(q.tmp0.l & q.a);
			q.p.n = !!(q.tmp0.l & 0x80u);
			q.p.v = !!(q.tmp0.l & 0x40u);
			break;
		// AND
		case 0x21:
		case 0x25:
		case 0x29:
		case 0x2D:
		case 0x31:
		case 0x35:
		case 0x39:
		case 0x3D:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.tmp0.l = (q.a &= q.tmp0.l);
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// CMP
		case 0xC1:
		case 0xC5:
		case 0xC9:
		case 0xCD:
		case 0xD1:
		case 0xD5:
		case 0xD9:
		case 0xDD:
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
			tick<kPeek> (m, q.addr.w, q.tmp0.w);
			q.p.z = !!(q.a == q.tmp0.l);
			q.p.n = !!((q.a - q.tmp0.l) & 0x80u);
			q.p.c = !!(q.tmp0.l <= q.a);
			break;
		// CPY
		case 0xC0:
		case 0xC4:
		case 0xCC:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.p.z = !!(q.y == q.tmp0.l);
			q.p.n = !!((q.y - q.tmp0.l) & 0x80u);
			q.p.c = !!(q.tmp0.l <= q.y);
			break;
		// CPX
		case 0xE0:
		case 0xE4:
		case 0xEC:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.p.z = !!(q.x == q.tmp0.l);
			q.p.n = !!((q.x - q.tmp0.l) & 0x80u);
			q.p.c = !!(q.tmp0.l <= q.x);
			break;
		// ORA
		case 0x01:
		case 0x05:
		case 0x09:
		case 0x0D:
		case 0x11:
		case 0x15:
		case 0x19:
		case 0x1D:
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.tmp0.l = (q.a |= q.tmp0.l);
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// EOR
		case 0x49:
		case 0x45:
		case 0x55:
		case 0x4D:
		case 0x5D:
		case 0x59:
		case 0x41:
		case 0x51:
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.tmp0.l = (q.a ^= q.tmp0.l);
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// ADC
		case 0x69:
		case 0x65:
		case 0x75:
		case 0x6D:
		case 0x7D:
		case 0x79:
		case 0x61:
		case 0x71:
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
			tick<kPeek> (m, q.addr.w, q.tmp0.w);
			// 9-bit sum: carry out lands in tmp1.h; overflow from the
			// standard signed-overflow identity.
			q.tmp1.w = q.tmp0.w + !!(q.p.c) + q.a;
			q.p.z = !q.tmp1.l;
			q.p.n = !!(q.tmp1.l & 0x80);
			q.p.c = !!q.tmp1.h;
			q.p.v = !!((~(q.a ^ q.tmp0.l) & (q.a ^ q.tmp1.l)) >> 7u);
			q.a = q.tmp1.l;
			break;
		// SBC
		case 0xE1:
		case 0xE5:
		case 0xE9:
		case 0xEB:
		case 0xED:
		case 0xF1:
		case 0xF5:
		case 0xF9:
		case 0xFD:
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
			tick<kPeek> (m, q.addr.w, q.tmp0.w);
			q.tmp1.w = q.a - q.tmp0.w - !q.p.c;
			q.p.z = !q.tmp1.l;
			q.p.n = !!(q.tmp1.l & 0x80);
			q.p.c = !q.tmp1.h;
			q.p.v = !!(((q.a ^ q.tmp0.l) & (q.a ^ q.tmp1.l)) >> 7u);
			q.a = q.tmp1.l;
			break;
		// SEC
		case 0x38:
			q.p.c = 1;
			break;
		// SED
		case 0xF8:
			q.p.d = 1;
			break;
		// SEI
		case 0x78:
			q.p.i = 1;
			break;
		// CLC
		case 0x18:
			q.p.c = 0;
			break;
		// CLD
		case 0xD8:
			q.p.d = 0;
			break;
		// CLI
		case 0x58:
			q.p.i = 0;
			break;
		// CLV
		case 0xB8:
			q.p.v = 0;
			break;
		// DEX
		case 0xCA:
			q.tmp0.w = --q.x;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// DEY
		case 0x88:
			q.tmp0.w = --q.y;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// INX
		case 0xE8:
			q.tmp0.w = ++q.x;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// INY
		case 0xC8:
			q.tmp0.w = ++q.y;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// ASL A
		case 0x0A:
			(q.tmp0.w = q.a) <<= 1u;
			q.a = q.tmp0.l;
			q.p.c = !!(q.tmp0.w & 0x100u);
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// LSR A
		case 0x4A:
			q.p.c = !!(q.a & 0x1u);
			q.tmp0.l = (q.a >>= 1u);
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// ROR A
		case 0x6A:
			q.tmp0.h = !!q.p.c;
			q.p.c = !!(q.a & 0x1u);
			q.tmp0.l = (q.a = (q.a >> 1u) | (q.tmp0.h << 7u));
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// ROL A
		case 0x2A:
			q.tmp0.h = !!q.p.c;
			q.p.c = !!(q.a & 0x80u);
			q.tmp0.l = (q.a = (q.a << 1u) | q.tmp0.h);
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// TAX
		case 0xAA:
			q.tmp0.w = q.x = q.a;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// TAY
		case 0xA8:
			q.tmp0.w = q.y = q.a;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// TXA
		case 0x8A:
			q.tmp0.w = q.a = q.x;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// TYA
		case 0x98:
			q.tmp0.w = q.a = q.y;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// TSX
		case 0xBA:
			q.tmp0.w = q.x = q.s;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// TXS
		case 0x9A:
			// TXS updates no flags.
			q.tmp0.w = q.s = q.x;
			break;
		// BCS
		case 0xB0:
			// Taken branches cost an extra cycle; crossing a page costs two.
			if (!q.p.c)
				break;
			if (q.pc.h != q.addr.h)
				tick<kDummyPeek> (m, (q.pc.l = q.addr.l, q.pc.w), q.tmp0.l);
			tick<kPeek> (m, q.pc.w = q.addr.w, q.tmp0.l);
			break;
		// BCC
		case 0x90:
			if (q.p.c)
				break;
			if (q.pc.h != q.addr.h)
				tick<kDummyPeek> (m, (q.pc.l = q.addr.l, q.pc.w), q.tmp0.l);
			tick<kPeek> (m, q.pc.w = q.addr.w, q.tmp0.l);
			break;
		// BEQ
		case 0xF0:
			if (!q.p.z)
				break;
			if (q.pc.h != q.addr.h)
				tick<kDummyPeek> (m, (q.pc.l = q.addr.l, q.pc.w), q.tmp0.l);
			tick<kPeek> (m, q.pc.w = q.addr.w, q.tmp0.l);
			break;
		// BNE
		case 0xD0:
			if (q.p.z)
				break;
			if (q.pc.h != q.addr.h)
				tick<kDummyPeek> (m, (q.pc.l = q.addr.l, q.pc.w), q.tmp0.l);
			tick<kPeek> (m, q.pc.w = q.addr.w, q.tmp0.l);
			break;
		// BVS
		case 0x70:
			if (!q.p.v)
				break;
			if (q.pc.h != q.addr.h)
				tick<kDummyPeek> (m, (q.pc.l = q.addr.l, q.pc.w), q.tmp0.l);
			tick<kPeek> (m, q.pc.w = q.addr.w, q.tmp0.l);
			break;
		// BVC
		case 0x50:
			if (q.p.v)
				break;
			if (q.pc.h != q.addr.h)
				tick<kDummyPeek> (m, (q.pc.l = q.addr.l, q.pc.w), q.tmp0.l);
			tick<kPeek> (m, q.pc.w = q.addr.w, q.tmp0.l);
			break;
		// BMI
		case 0x30:
			if (!q.p.n)
				break;
			if (q.pc.h != q.addr.h)
				tick<kDummyPeek> (m, (q.pc.l = q.addr.l, q.pc.w), q.tmp0.l);
			tick<kPeek> (m, q.pc.w = q.addr.w, q.tmp0.l);
			break;
		// BPL
		case 0x10:
			if (q.p.n)
				break;
			if (q.pc.h != q.addr.h)
				tick<kDummyPeek> (m, (q.pc.l = q.addr.l, q.pc.w), q.tmp0.l);
			tick<kPeek> (m, q.pc.w = q.addr.w, q.tmp0.l);
			break;
		// PHP
		case 0x08:
			// PHP pushes the status with the B flag forced on.
			q.tmp0.l = q.p.bits | BreakFlag;
			tick<kPoke> (m, 0x100 + q.s--, q.tmp0.l);
			break;
		// PHA
		case 0x48:
			q.tmp0.l = q.a;
			tick<kPoke> (m, 0x100 + q.s--, q.tmp0.l);
			break;
		// PLP
		case 0x28:
			tick<kDummyPeek> (m, 0x100 + q.s, q.tmp0.l);
			tick<kPeek> (m, 0x100 + ++q.s, q.tmp0.l);
			q.p.bits = q.tmp0.l;
			q.p.b = 0;
			q.p.e = 1;
			break;
		// PLA
		case 0x68:
			tick<kDummyPeek> (m, 0x100 + q.s, q.tmp0.l);
			tick<kPeek> (m, 0x100 + ++q.s, q.tmp0.l);
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			q.a = q.tmp0.l;
			break;
		// ASL (read-modify-write: read, dummy write-back, then final write)
		case 0x1E:
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0x06:
		case 0x16:
		case 0x0E:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPoke> (m, q.addr.w, q.tmp0.l);
			q.p.c = !!(q.tmp0.l & 0x80);
			q.tmp0.l <<= 1u;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			break;
		// LSR
		case 0x5E:
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0x46:
		case 0x56:
		case 0x4E:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPoke> (m, q.addr.w, q.tmp0.l);
			q.p.c = !!(q.tmp0.l & 0x1u);
			q.tmp0.l >>= 1u;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			break;
		// ROL
		case 0x3E:
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0x26:
		case 0x36:
		case 0x2E:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPoke> (m, q.addr.w, q.tmp0.l);
			q.tmp0.h = !!q.p.c;
			q.p.c = !!(q.tmp0.l & 0x80);
			q.tmp0.l = (q.tmp0.l << 1u) | q.tmp0.h;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			break;
		// ROR
		case 0x7E:
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0x66:
		case 0x76:
		case 0x6E:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPoke> (m, q.addr.w, q.tmp0.l);
			q.tmp0.h = !!q.p.c;
			q.p.c = !!(q.tmp0.l & 0x1u);
			q.tmp0.l = (q.tmp0.l >> 1u) | (q.tmp0.h << 7u);
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			break;
		// INC
		case 0xFE:
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0xE6:
		case 0xF6:
		case 0xEE:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPoke> (m, q.addr.w, q.tmp0.l);
			++q.tmp0.l;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			break;
		// DEC
		case 0xDE:
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0xC6:
		case 0xD6:
		case 0xCE:
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPoke> (m, q.addr.w, q.tmp0.l);
			--q.tmp0.l;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			break;
		// DCP/DCM (undocumented: DEC memory then CMP)
		case 0xC7: // DCM ab 5
		case 0xD7: // DCM ab,X 6
		case 0xCF: // DCM abcd No. Cycles= 6
		case 0xDF: // DCM abcd,X 7
		case 0xDB: // DCM abcd,Y 7
		case 0xC3: // DCM (ab,X) 8
		case 0xD3: // DCM (ab),Y 8
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l--);
			q.p.z = !!(q.a == q.tmp0.l);
			q.p.n = !!((q.a - q.tmp0.l) & 0x80u);
			q.p.c = !!(q.tmp0.l <= q.a);
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			break;
		// AXS (undocumented: store A AND X)
		case 0x87: //AXS ab ;ab ; 3
		case 0x97: //AXS ab,Y ;ab ; 4
		case 0x8F: //AXS abcd ;cd ab ;No. Cycles= 4
		case 0x83: //AXS (ab,X) ;ab ; 6
			tick<kPoke> (m, q.addr.w, q.a & q.x);
			break;
		// ISB/INS/ISC (undocumented: INC memory then SBC)
		case 0xE7: // INS ab 5
		case 0xF7: // INS ab,X 6
		case 0xEF: // INS abcd No. Cycles= 6
		case 0xFF: // INS abcd,X 7
		case 0xFB: // INS abcd,Y 7
		case 0xE3: // INS (ab,X) 8
		case 0xF3: // INS (ab),Y 8
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
			tick<kPeek> (m, q.addr.w, q.tmp0.w);
			tick<kDummyPoke> (m, q.addr.w, q.tmp0.l++);
			tick<kPoke> (m, q.addr.w, q.tmp0.w);
			q.tmp1.w = q.a - q.tmp0.w - !q.p.c;
			q.p.z = !q.tmp1.l;
			q.p.n = !!(q.tmp1.l & 0x80);
			q.p.c = !q.tmp1.h;
			q.p.v = !!(((q.a ^ q.tmp0.l) & (q.a ^ q.tmp1.l)) >> 7u);
			q.a = q.tmp1.l;
			break;
		// ASO/SLO (undocumented: ASL memory then ORA)
		case 0x1F: // ASO abcd,X 7
		case 0x1B: // ASO abcd,Y 7
		case 0x13: // ASO (ab),Y 8
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0x07: // ASO ab 5
		case 0x17: // ASO ab,X 6
		case 0x0F: // ASO abcd No. Cycles= 6
		case 0x03: // ASO (ab,X) 8
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.p.c = !!(q.tmp0.l & 0x80);
			q.tmp0.l <<= 1u;
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l = (q.a |= q.tmp0.l));
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// RLA (undocumented: ROL memory then AND)
		case 0x33: // RLA (ab),Y 8
		case 0x3B: // RLA abcd,Y 7
		case 0x3F: // RLA abcd,X 7
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0x27: // RLA ab 5
		case 0x37: // RLA ab,X 6
		case 0x2F: // RLA abcd No. Cycles= 6
		case 0x23: // RLA (ab,X) 8
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.tmp0.h = !!q.p.c;
			q.p.c = !!(q.tmp0.l & 0x80);
			q.tmp0.l = (q.tmp0.l << 1u) | q.tmp0.h;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPeek> (m, q.addr.w, q.a &= q.tmp0.l);
			break;
		// LSE/SRE (undocumented: LSR memory then EOR)
		case 0x5F: // LSE abcd,X 7
		case 0x5B: // LSE abcd,Y 7
		case 0x53: // LSE (ab),Y 8
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0x47: // LSE ab 5
		case 0x57: // LSE ab,X 6
		case 0x4F: // LSE abcd No. Cycles= 6
		case 0x43: // LSE (ab,X) 8
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.p.c = !!(q.tmp0.l & 0x1u);
			q.tmp0.l >>= 1u;
			tick<kPoke> (m, q.addr.w, q.tmp0.l);
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l = (q.a ^= q.tmp0.l));
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80u);
			break;
		// RRA (undocumented: ROR memory then ADC)
		case 0x7B: // RRA abcd,Y 7
		case 0x7F: // RRA abcd,X 7
		case 0x73: // RRA (ab),Y 8
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.l);
		case 0x67: // RRA ab 5
		case 0x77: // RRA ab,X 6
		case 0x6F: // RRA abcd No. Cycles= 6
		case 0x63: // RRA (ab,X) 8
			tick<kPeek> (m, q.addr.w, q.tmp0.l);
			q.tmp0.h = !!q.p.c;
			q.p.c = !!(q.tmp0.l & 0x1u);
			q.tmp0.l = (q.tmp0.l >> 1u) | (q.tmp0.h << 7u);
			tick<kPoke> (m, q.addr.w, q.tmp0.w &= 0xff);
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
			q.tmp1.w = q.tmp0.w + (!!q.p.c) + q.a;
			q.p.z = !q.tmp1.l;
			q.p.n = !!(q.tmp1.l & 0x80);
			q.p.c = !!q.tmp1.h;
			q.p.v = !!((~(q.a ^ q.tmp0.l) & (q.a ^ q.tmp1.l)) >> 7u);
			q.a = q.tmp1.l;
			break;
		// LAS/LAR (undocumented: memory AND S into A, X and S)
		case 0xBB:
			if (cross)
				tick<kDummyPeek>(m, q.addr.w, q.tmp0.l);
			q.tmp0.l &= q.s;
			q.a = q.tmp0.l;
			q.x = q.tmp0.l;
			q.s = q.tmp0.l;
			q.p.z = !q.tmp0.l;
			q.p.n = !!(q.tmp0.l & 0x80);
			tick<kDummyPeek>(m, q.addr.w, q.tmp0.l);
			break;
		// NOP (multi-byte/undocumented variants still consume their cycles)
		case 0x1C:
		case 0x3C:
		case 0x5C:
		case 0x7C:
		case 0xDC:
		case 0xFC:
			if (cross == true)
				tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
		case 0x0C:
		case 0x04:
		case 0x14:
		case 0x34:
		case 0x44:
		case 0x54:
		case 0x64:
		case 0x74:
		case 0x80:
		case 0xD4:
		case 0xF4:
			tick<kDummyPeek> (m, q.addr.w, q.tmp0.w);
		case 0x1A:
		case 0x3A:
		case 0x5A:
		case 0x7A:
		case 0xDA:
		case 0xEA:
		case 0xFA:
			break;
		default:
			assert (false);
			break;
		}
	}
	return true;
}
// Reset the CPU. A soft reset merely latches the RST signal line, which the
// core services on its next step; a hard reset reconstructs the entire
// machine state in place, restoring every register and flag to power-on.
template <ResetType _Type>
void RicohCPU::reset ()
{
    if constexpr (_Type == kHardReset)
        new (&q) State{};       // rebuild state in place: full power-on reset
    if constexpr (_Type == kSoftReset)
        setSignal (ResetBit);   // only raise the reset line
}
// Latch the requested signal lines (reset / NMI / IRQ) in the CPU's mode
// flags. Lines not named in `bits` are left untouched.
inline void RicohCPU::setSignal (byte bits)
{
    if ((bits & ResetBit) != 0u)
        q.mode.rst = 1u;
    if ((bits & NonMaskableBit) != 0u)
        q.mode.nmi = 1u;
    if ((bits & InterruptBit) != 0u)
        q.mode.irq = 1u;
}
// Release the requested signal lines (reset / NMI / IRQ) in the CPU's mode
// flags. Lines not named in `bits` are left untouched.
inline void RicohCPU::clrSignal (byte bits)
{
    if ((bits & ResetBit) != 0u)
        q.mode.rst = 0u;
    if ((bits & NonMaskableBit) != 0u)
        q.mode.nmi = 0u;
    if ((bits & InterruptBit) != 0u)
        q.mode.irq = 0u;
}
// Construct a CPU seeded with an explicit machine state (registers, flags
// and pending signal lines). The state is moved in, not copied — useful for
// restoring a previously captured snapshot.
inline RicohCPU::RicohCPU (State state)
: q (std::move (state))
{}
|
metaphore/gdx-liftoff | src/main/java/com/github/czyzby/lml/parser/impl/attribute/scroll/ScrollPercentLmlAttribute.java | <reponame>metaphore/gdx-liftoff
package com.github.czyzby.lml.parser.impl.attribute.scroll;
import com.badlogic.gdx.scenes.scene2d.ui.ScrollPane;
import com.github.czyzby.lml.parser.LmlParser;
import com.github.czyzby.lml.parser.action.ActorConsumer;
import com.github.czyzby.lml.parser.tag.LmlAttribute;
import com.github.czyzby.lml.parser.tag.LmlTag;
import com.github.czyzby.lml.util.LmlUtilities;
/** See {@link ScrollPane#setScrollPercentX(float)}, {@link ScrollPane#setScrollPercentY(float)}. Mapped to
* "scrollPercent", "percent".
*
* @author MJ */
public class ScrollPercentLmlAttribute implements LmlAttribute<ScrollPane> {
    @Override
    public Class<ScrollPane> getHandledType() {
        return ScrollPane.class;
    }

    @Override
    public void process(final LmlParser parser, final LmlTag tag, final ScrollPane actor,
            final String rawAttributeData) {
        // Applying the percent is deferred until the tag is closed: only then are
        // the pane's children attached, so its scroll range can be computed.
        final ActorConsumer<Object, Object> applyScrollPercent = new ActorConsumer<Object, Object>() {
            @Override
            public Object consume(final Object widget) {
                // Force a layout pass so the scroll pane knows its size first.
                actor.layout();
                final float scrollPercent = parser.parseFloat(rawAttributeData, actor);
                actor.setScrollPercentX(scrollPercent);
                actor.setScrollPercentY(scrollPercent);
                return null;
            }
        };
        LmlUtilities.getLmlUserObject(actor).addOnCloseAction(applyScrollPercent);
    }
}
|
CiscoDevNet/ydk-cpp | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_acl_cfg.hpp | <filename>cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_acl_cfg.hpp
#ifndef _CISCO_IOS_XR_IPV4_ACL_CFG_
#define _CISCO_IOS_XR_IPV4_ACL_CFG_
#include <memory>
#include <vector>
#include <string>
#include <ydk/types.hpp>
#include <ydk/errors.hpp>
namespace cisco_ios_xr {
namespace Cisco_IOS_XR_ipv4_acl_cfg {
// Root container of the Cisco-IOS-XR-ipv4-acl-cfg model: holds the IPv4
// access lists (accesses), prefix lists (prefixes) and log-update settings.
class Ipv4AclAndPrefixList : public ydk::Entity
{
public:
Ipv4AclAndPrefixList();
~Ipv4AclAndPrefixList();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
std::shared_ptr<ydk::Entity> clone_ptr() const override;
ydk::augment_capabilities_function get_augment_capabilities_function() const override;
std::string get_bundle_yang_models_location() const override;
std::string get_bundle_name() const override;
std::map<std::pair<std::string, std::string>, std::string> get_namespace_identity_lookup() const override;
class Accesses; //type: Ipv4AclAndPrefixList::Accesses
class Prefixes; //type: Ipv4AclAndPrefixList::Prefixes
class LogUpdate; //type: Ipv4AclAndPrefixList::LogUpdate
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses> accesses;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Prefixes> prefixes;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::LogUpdate> log_update;
}; // Ipv4AclAndPrefixList
// Table of configured IPv4 access lists; wraps the keyed list of Access nodes.
class Ipv4AclAndPrefixList::Accesses : public ydk::Entity
{
public:
Accesses();
~Accesses();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
std::string get_absolute_path() const override;
class Access; //type: Ipv4AclAndPrefixList::Accesses::Access
ydk::YList access;
}; // Ipv4AclAndPrefixList::Accesses
// One named access list, keyed by access_list_name, owning its list of ACEs.
class Ipv4AclAndPrefixList::Accesses::Access : public ydk::Entity
{
public:
Access();
~Access();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
std::string get_absolute_path() const override;
ydk::YLeaf access_list_name; //type: string (list key)
class AccessListEntries; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries> access_list_entries;
}; // Ipv4AclAndPrefixList::Accesses::Access
// Container for the sequence of access-control entries (ACEs) of one ACL.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries : public ydk::Entity
{
public:
AccessListEntries();
~AccessListEntries();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
class AccessListEntry; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry
ydk::YList access_list_entry;
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries
// One access-control entry (ACE), keyed by sequence_number: the grant
// (permit/deny), the protocol selection, match criteria (addresses, ports,
// ICMP/TCP specifics, lengths, TTL, DSCP, fragments) and actions
// (logging, counter, capture, set-TTL, next hop).
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry : public ydk::Entity
{
public:
AccessListEntry();
~AccessListEntry();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
// --- Leaves ---
ydk::YLeaf sequence_number; //type: uint32 (list key)
ydk::YLeaf grant; //type: Ipv4AclGrantEnum
ydk::YLeaf protocol_operator; //type: Ipv4AclOperatorEnum
ydk::YLeaf protocol; //type: one of uint32, enumeration
ydk::YLeaf protocol2; //type: one of uint32, enumeration
ydk::YLeaf fragment_type; //type: one of uint32, enumeration
ydk::YLeaf counter_name; //type: string
ydk::YLeaf igmp_message_type; //type: one of uint32, enumeration
ydk::YLeaf precedence; //type: one of uint32, enumeration
ydk::YLeaf log_option; //type: Ipv4AclLoggingEnum
ydk::YLeaf capture; //type: boolean
ydk::YLeaf icmp_off; //type: empty
ydk::YLeaf qos_group; //type: uint32
ydk::YLeaf set_ttl; //type: uint32
ydk::YLeaf fragments; //type: empty
ydk::YLeaf remark; //type: string
ydk::YLeaf source_prefix_group; //type: string
ydk::YLeaf destination_prefix_group; //type: string
ydk::YLeaf source_port_group; //type: string
ydk::YLeaf destination_port_group; //type: string
ydk::YLeaf sequence_str; //type: string
// --- Match-criteria containers ---
class SourceNetwork; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::SourceNetwork
class DestinationNetwork; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::DestinationNetwork
class SourcePort; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::SourcePort
class DestinationPort; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::DestinationPort
class Icmp; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Icmp
class Tcp; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Tcp
class PacketLength; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::PacketLength
class TimeToLive; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::TimeToLive
class FragmentOffset; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::FragmentOffset
class NextHop; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop
class Dscp; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Dscp
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::SourceNetwork> source_network;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::DestinationNetwork> destination_network;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::SourcePort> source_port;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::DestinationPort> destination_port;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Icmp> icmp;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Tcp> tcp;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::PacketLength> packet_length;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::TimeToLive> time_to_live;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::FragmentOffset> fragment_offset;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop> next_hop;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Dscp> dscp;
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry
// Source-address match of an ACE: address with either wildcard bits or a
// prefix length.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::SourceNetwork : public ydk::Entity
{
public:
SourceNetwork();
~SourceNetwork();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf source_address; //type: string
ydk::YLeaf source_wild_card_bits; //type: string
ydk::YLeaf source_prefix_length; //type: uint8
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::SourceNetwork
// Destination-address match of an ACE: address with either wildcard bits or
// a prefix length.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::DestinationNetwork : public ydk::Entity
{
public:
DestinationNetwork();
~DestinationNetwork();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf destination_address; //type: string
ydk::YLeaf destination_wild_card_bits; //type: string
ydk::YLeaf destination_prefix_length; //type: uint8
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::DestinationNetwork
// Source-port match of an ACE: comparison operator with one or two port
// values (two for range-style operators).
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::SourcePort : public ydk::Entity
{
public:
SourcePort();
~SourcePort();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf source_operator; //type: Ipv4AclOperatorEnum
ydk::YLeaf first_source_port; //type: one of uint32, enumeration
ydk::YLeaf second_source_port; //type: one of uint32, enumeration
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::SourcePort
// Destination-port match of an ACE: comparison operator with one or two
// port values (two for range-style operators).
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::DestinationPort : public ydk::Entity
{
public:
DestinationPort();
~DestinationPort();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf destination_operator; //type: Ipv4AclOperatorEnum
ydk::YLeaf first_destination_port; //type: one of uint32, enumeration
ydk::YLeaf second_destination_port; //type: one of uint32, enumeration
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::DestinationPort
// ICMP match of an ACE: a combined ICMP type/code selector.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Icmp : public ydk::Entity
{
public:
Icmp();
~Icmp();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf icmp_type_code; //type: Ipv4AclIcmpTypeCodeEnum
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Icmp
// TCP-flags match of an ACE: match operator plus the flag bits and mask.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Tcp : public ydk::Entity
{
public:
Tcp();
~Tcp();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf tcp_bits_match_operator; //type: Ipv4AclTcpMatchOperatorEnum
ydk::YLeaf tcp_bits; //type: Ipv4AclTcpBitsNumber
ydk::YLeaf tcp_bits_mask; //type: Ipv4AclTcpBitsNumber
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Tcp
// Packet-length match of an ACE: operator with min/max bounds.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::PacketLength : public ydk::Entity
{
public:
PacketLength();
~PacketLength();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf packet_length_operator; //type: Ipv4AclOperatorEnum
ydk::YLeaf packet_length_min; //type: uint32
ydk::YLeaf packet_length_max; //type: uint32
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::PacketLength
// TTL match of an ACE: operator with min/max bounds.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::TimeToLive : public ydk::Entity
{
public:
TimeToLive();
~TimeToLive();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf time_to_live_operator; //type: Ipv4AclOperatorEnum
ydk::YLeaf time_to_live_min; //type: uint32
ydk::YLeaf time_to_live_max; //type: uint32
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::TimeToLive
// Fragment-offset match of an ACE: operator with one or two offset values.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::FragmentOffset : public ydk::Entity
{
public:
FragmentOffset();
~FragmentOffset();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf fragment_offset_operator; //type: Ipv4AclOperatorEnum
ydk::YLeaf fragment_offset_1; //type: uint32
ydk::YLeaf fragment_offset_2; //type: uint32
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::FragmentOffset
// Next-hop action of an ACE: up to three next hops selected by next_hop_type
// (presumably for ACL-based forwarding — confirm against the YANG model).
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop : public ydk::Entity
{
public:
NextHop();
~NextHop();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf next_hop_type; //type: NextHopType
class NextHop1; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop1
class NextHop2; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop2
class NextHop3; //type: Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop3
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop1> next_hop_1;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop2> next_hop_2;
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop3> next_hop_3;
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop
// First next hop: address, VRF and tracked-object name.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop1 : public ydk::Entity
{
public:
NextHop1();
~NextHop1();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf next_hop; //type: string
ydk::YLeaf vrf_name; //type: string
ydk::YLeaf track_name; //type: string
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop1
// Second next hop: address, VRF and tracked-object name.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop2 : public ydk::Entity
{
public:
NextHop2();
~NextHop2();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf next_hop; //type: string
ydk::YLeaf vrf_name; //type: string
ydk::YLeaf track_name; //type: string
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop2
// Third next hop: address, VRF and tracked-object name.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop3 : public ydk::Entity
{
public:
NextHop3();
~NextHop3();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf next_hop; //type: string
ydk::YLeaf vrf_name; //type: string
ydk::YLeaf track_name; //type: string
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::NextHop::NextHop3
// DSCP match of an ACE: operator with min/max DSCP values.
class Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Dscp : public ydk::Entity
{
public:
Dscp();
~Dscp();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
ydk::YLeaf dscp_operator; //type: Ipv4AclOperatorEnum
ydk::YLeaf dscp_min; //type: one of uint32, enumeration
ydk::YLeaf dscp_max; //type: one of uint32, enumeration
}; // Ipv4AclAndPrefixList::Accesses::Access::AccessListEntries::AccessListEntry::Dscp
// Table of configured IPv4 prefix lists; wraps the keyed list of Prefix nodes.
class Ipv4AclAndPrefixList::Prefixes : public ydk::Entity
{
public:
Prefixes();
~Prefixes();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
std::string get_absolute_path() const override;
class Prefix; //type: Ipv4AclAndPrefixList::Prefixes::Prefix
ydk::YList prefix;
}; // Ipv4AclAndPrefixList::Prefixes
// One named prefix list, keyed by prefix_list_name, owning its entries.
class Ipv4AclAndPrefixList::Prefixes::Prefix : public ydk::Entity
{
public:
Prefix();
~Prefix();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
std::string get_absolute_path() const override;
ydk::YLeaf prefix_list_name; //type: string (list key)
class PrefixListEntries; //type: Ipv4AclAndPrefixList::Prefixes::Prefix::PrefixListEntries
std::shared_ptr<cisco_ios_xr::Cisco_IOS_XR_ipv4_acl_cfg::Ipv4AclAndPrefixList::Prefixes::Prefix::PrefixListEntries> prefix_list_entries;
}; // Ipv4AclAndPrefixList::Prefixes::Prefix
// Container for the sequence of entries of one prefix list.
class Ipv4AclAndPrefixList::Prefixes::Prefix::PrefixListEntries : public ydk::Entity
{
public:
PrefixListEntries();
~PrefixListEntries();
bool has_data() const override;
bool has_operation() const override;
std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
std::string get_segment_path() const override;
std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
bool has_leaf_or_child_of_name(const std::string & name) const override;
class PrefixListEntry; //type: Ipv4AclAndPrefixList::Prefixes::Prefix::PrefixListEntries::PrefixListEntry
ydk::YList prefix_list_entry;
}; // Ipv4AclAndPrefixList::Prefixes::Prefix::PrefixListEntries
// One entry of an IPv4 prefix list: a permit/deny rule keyed by sequence
// number, with optional exact/min/max prefix-length matching and a remark.
// Generated YDK binding class implementing the ydk::Entity contract.
class Ipv4AclAndPrefixList::Prefixes::Prefix::PrefixListEntries::PrefixListEntry : public ydk::Entity
{
    public:
        PrefixListEntry();
        ~PrefixListEntry();

        bool has_data() const override;
        bool has_operation() const override;
        std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
        std::string get_segment_path() const override;
        std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
        void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
        void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
        std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
        bool has_leaf_or_child_of_name(const std::string & name) const override;

        ydk::YLeaf sequence_number; //type: uint32 (list key / ordering)
        ydk::YLeaf grant; //type: Ipv4AclGrantEnum (permit or deny)
        ydk::YLeaf prefix; //type: string
        ydk::YLeaf netmask; //type: string
        ydk::YLeaf match_exact_length; //type: empty (presence flag)
        ydk::YLeaf exact_prefix_length; //type: uint32
        ydk::YLeaf match_max_length; //type: empty (presence flag)
        ydk::YLeaf max_prefix_length; //type: uint32
        ydk::YLeaf match_min_length; //type: empty (presence flag)
        ydk::YLeaf min_prefix_length; //type: uint32
        ydk::YLeaf remark; //type: string (free-form comment)
}; // Ipv4AclAndPrefixList::Prefixes::Prefix::PrefixListEntries::PrefixListEntry
// Global ACL log-update configuration (threshold and rate leaves).
// Generated YDK binding class implementing the ydk::Entity contract.
class Ipv4AclAndPrefixList::LogUpdate : public ydk::Entity
{
    public:
        LogUpdate();
        ~LogUpdate();

        bool has_data() const override;
        bool has_operation() const override;
        std::vector<std::pair<std::string, ydk::LeafData> > get_name_leaf_data() const override;
        std::string get_segment_path() const override;
        std::shared_ptr<ydk::Entity> get_child_by_name(const std::string & yang_name, const std::string & segment_path) override;
        void set_value(const std::string & value_path, const std::string & value, const std::string & name_space, const std::string & name_space_prefix) override;
        void set_filter(const std::string & value_path, ydk::YFilter yfliter) override;
        std::map<std::string, std::shared_ptr<ydk::Entity>> get_children() const override;
        bool has_leaf_or_child_of_name(const std::string & name) const override;
        std::string get_absolute_path() const override;

        ydk::YLeaf threshold; //type: uint32
        ydk::YLeaf rate; //type: uint32
}; // Ipv4AclAndPrefixList::LogUpdate
// YANG enumeration distinguishing regular from default next-hops.
class NextHopType : public ydk::Enum
{
    public:
        static const ydk::Enum::YLeaf regular_next_hop;
        static const ydk::Enum::YLeaf default_next_hop;

        // Maps a YANG enum literal to its numeric value; -1 for unknown names.
        static int get_enum_value(const std::string & name) {
            return name == "regular-next-hop" ? 1
                 : name == "default-next-hop" ? 2
                 : -1;
        }
};
}
}
#endif /* _CISCO_IOS_XR_IPV4_ACL_CFG_ */
|
eaglesakura/eglibrary | deprecated/eglibrary-android-api8/src/com/eaglesakura/lib/android/game/loop/GameLoopManagerBase.java | package com.eaglesakura.lib.android.game.loop;
import com.eaglesakura.lib.android.game.display.VirtualDisplay;
import com.eaglesakura.lib.android.game.graphics.gl11.BitmapTextureImage;
import com.eaglesakura.lib.android.game.graphics.gl11.GPU;
import com.eaglesakura.lib.android.game.graphics.gl11.TextureImageBase;
import com.eaglesakura.lib.android.game.graphics.gl11.hw.EGLManager;
import com.eaglesakura.lib.android.game.input.MultiTouchInput;
import com.eaglesakura.lib.android.game.thread.AsyncHandler;
import com.eaglesakura.lib.android.game.thread.ThreadSyncRunnerBase;
import com.eaglesakura.lib.android.game.thread.UIHandler;
import com.eaglesakura.lib.android.game.util.LogUtil;
import com.eaglesakura.lib.android.game.util.Timer;
import com.eaglesakura.lib.android.view.CanvasView;
import com.eaglesakura.lib.android.view.OpenGLView;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.os.Handler;
import android.view.SurfaceHolder;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import java.io.IOException;
import java.io.InputStream;
/**
 * Manages the game loop and its associated Views. Note that the game loop
 * runs on a dedicated thread, separate from the UI thread.
 */
@Deprecated
public abstract class GameLoopManagerBase {
    /** Target frame rate, in frames per second. */
    int frameRate = 30;
    /** Frame rate actually achieved by the most recent frame. */
    float runningFrameRate = 0;
    /** Delay between resume and frame restart, in ms. Defaults to 1000 ms. */
    int resumeWaitTime = 1000;
    /** Counts the real (measured) frame rate. */
    FramerateCounter framerateCounter = new FramerateCounter();
    /** Calling context (typically the host Activity). */
    Context context = null;
    /** View used for OpenGL rendering. */
    OpenGLView glView = null;
    /** EGL management class. */
    EGLManager egl = null;
    /** View used for Canvas rendering. */
    CanvasView canvasView = null;
    /** Layout holding the GL and Canvas views. */
    FrameLayout rootLayout = null;
    /** Multi-touch input handling. */
    MultiTouchInput multiTouchInput;
    /** Virtual display definition. */
    VirtualDisplay virtualDisplay = new VirtualDisplay();
    /**
     * Lifecycle state of the loop.
     *
     * @author <NAME>
     */
    protected enum LifeCycle {
        /** Native startup in progress. */
        Booting,
        /** Initializing. */
        Initializing,
        /** Running. */
        Running,
        /** Temporarily paused. */
        Paused,
        /** Destroyed. */
        Finished,
    }
    LifeCycle lifeCycle = LifeCycle.Booting;
    /** Timer used for debugging. */
    private Timer debugTimer = new Timer();
    /** Debug flag; when false the debug timer methods are no-ops. */
    private static final boolean TIME_OUTPUT = false;
    /** Starts the debug timer (only when TIME_OUTPUT is enabled). */
    private void debugTimeBegin() {
        if (TIME_OUTPUT) {
            debugTimer.start();
        }
    }
    /** Stops the debug timer and logs the elapsed time. */
    private void debugTimerEnd(String messageFormat) {
        if (TIME_OUTPUT) {
            LogUtil.log(String.format(messageFormat, debugTimer.end()));
        }
    }
    /** Runnable executed once per frame on the game thread. */
    Runnable frameRunner = new Runnable() {
        @Override
        public void run() {
            final long frameTime = 1000 / frameRate;
            final long start = System.currentTimeMillis();
            debugTimeBegin();
            {
                //! update the touch panel state
                getMultiTouchInput().update();
                //! run the game's per-frame processing
                onGameFrame();
            }
            debugTimerEnd("onGameFrame :: %d ms");
            final long end = System.currentTimeMillis();
            // schedule next frame after the remaining slice, at least 1 ms
            final long nextTime = Math.max(1, frameTime - (end - start));
            //! update the instantaneous frame rate
            runningFrameRate = 1000.0f / (Math.max(1.0f, (float) (end - start)));
            //! update the real (measured) frame rate
            framerateCounter.update();
            if (isNextFrameEnable()) {
                gameHandle.postDelayed(this, nextTime);
            }
        }
    };
    /** Returns the frame rate of the most recent frame. */
    public float getFramerateLast() {
        return runningFrameRate;
    }
    /** Returns the frame rate measured over the last second. */
    public int getFramerateReal() {
        return framerateCounter.getRealRate();
    }
    /** Implemented by the owning component. */
    ILoopParent loopParent = null;
    /** Handler bound to the UI thread. */
    UIHandler uiHandle = null;
    /** Handler for the game-loop thread (shared across instances). */
    static AsyncHandler gameHandle = AsyncHandler.createInstance("gameloop");
    /**
     * @param context caller context
     * @param loopParent owner that reports its finished state
     */
    public GameLoopManagerBase(Context context, ILoopParent loopParent) {
        this.context = context;
        this.loopParent = loopParent;
        this.uiHandle = new UIHandler();
        this.multiTouchInput = new MultiTouchInput(virtualDisplay);
        gameHandle.getThread().setName(getThreadName());
        createViews();
    }
    /** Returns the virtual display. */
    public VirtualDisplay getVirtualDisplay() {
        return virtualDisplay;
    }
    /**
     * Returns the game thread's name.
     * Reflected in DDMS.
     */
    protected String getThreadName() {
        return "GameThread";
    }
    /**
     * Returns the multi-touch input device.<BR>
     * Basically handles two touch points.
     */
    public MultiTouchInput getMultiTouchInput() {
        return multiTouchInput;
    }
    /** Sets the frame rate in frames per second. */
    public void setFrameRateParSec(int frameRate) {
        this.frameRate = frameRate;
    }
    /** Creates the rendering views and wires up the GL surface callbacks. */
    private void createViews() {
        rootLayout = new FrameLayout(context);
        {
            glView = new OpenGLView(context);
            glView.getHolder().addCallback(new SurfaceHolder.Callback() {
                /**
                 * The surface was destroyed: pause, and when the owner is
                 * finished, also finalize the game and dispose EGL.
                 */
                @Override
                public void surfaceDestroyed(SurfaceHolder holder) {
                    (new ThreadSyncRunnerBase<Void>(gameHandle) {
                        @Override
                        public Void onOtherThreadRun() {
                            if (loopParent.isFinished() || lifeCycle == LifeCycle.Finished) {
                                onGamePause();
                                onGameFinalize();
                                System.gc();
                                // getGLManager().dispose();
                                egl.dispose();
                                lifeCycle = LifeCycle.Finished;
                                // gameHandle.dispose();
                            } else {
                                onGamePause();
                                // getGLManager().onPause();
                                lifeCycle = LifeCycle.Paused;
                            }
                            return null;
                        }
                    }).run();
                }
                @Override
                public void surfaceCreated(SurfaceHolder holder) {
                }
                /**
                 * The surface was created or changed: either resume the game
                 * (EGL already initialized) or perform first-time init, then
                 * (re)start the frame loop.
                 */
                @Override
                public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                    if (paused || lifeCycle == LifeCycle.Finished) {
                        return;
                    }
                    LogUtil.log(String.format("Surface Size : %d x %d", width, height));
                    if (egl.isInitialized()) {
                        //! resume GL and the game
                        (new ThreadSyncRunnerBase<Void>(gameHandle) {
                            @Override
                            public Void onOtherThreadRun() {
                                if (getGLView().isDestroyed()) {
                                    LogUtil.log("ResumeOpenGL");
                                }
                                LogUtil.log("GameResume");
                                onGameResume();
                                lifeCycle = LifeCycle.Running;
                                LogUtil.log("RestartGame");
                                gameHandle.post(frameRunner);
                                return null;
                            }
                        }).run();
                    } else {
                        gameHandle.post(new Runnable() {
                            @Override
                            public void run() {
                                onGameInitialize();
                                lifeCycle = LifeCycle.Running;
                                gameHandle.post(frameRunner);
                            }
                        });
                    }
                }
            });
        }
        {
            canvasView = new CanvasView(context);
            canvasView.setZOrderOnTop(true);
        }
        rootLayout.addView(glView);
        rootLayout.addView(canvasView);
    }
    /** Sets the delay between onResume and frame restart. */
    public void setResumeWaitTime(int resumeWaitTime) {
        this.resumeWaitTime = resumeWaitTime;
    }
    /** Returns the GL management class. */
    public GPU getGLManager() {
        return glView.getGLManager();
    }
    /**
     * @return the OpenGL view
     */
    public OpenGLView getGLView() {
        return glView;
    }
    /**
     * @return the Canvas view
     */
    public CanvasView getCanvasView() {
        return canvasView;
    }
    /** Returns the bound context. */
    public Context getContext() {
        return context;
    }
    /** Returns the root rendering view. */
    public ViewGroup getRootView() {
        return rootLayout;
    }
    /** Interface the caller must implement. */
    public interface ILoopParent {
        public boolean isFinished();
    }
    /** Returns true if the next frame may run. */
    boolean isNextFrameEnable() {
        updateLifeCycle();
        return lifeCycle == LifeCycle.Running;
    }
    /** Posts work to the game thread. */
    public void post(Runnable runnable) {
        if (gameHandle != null) {
            gameHandle.post(runnable);
        }
    }
    /** Posts to the game thread and blocks until execution completes. */
    public void postWithWait(final Runnable runnable) {
        if (gameHandle != null) {
            (new ThreadSyncRunnerBase<Void>(gameHandle) {
                @Override
                public Void onOtherThreadRun() throws Exception {
                    runnable.run();
                    return null;
                }
            }).run();
        }
    }
    /** Posts work to the UI thread. */
    public void postUIThread(Runnable runnable) {
        if (uiHandle != null) {
            uiHandle.post(runnable);
        }
    }
    /** Posts to the UI thread and blocks until execution completes. */
    public void postUIThreadWithWait(final Runnable runnable) {
        (new ThreadSyncRunnerBase<Void>(uiHandle) {
            @Override
            public Void onOtherThreadRun() throws Exception {
                runnable.run();
                return null;
            }
        }).run();
    }
    /**
     * Updates the lifecycle state.<BR>
     * If the owning Activity has been closed, force the state to Finished.
     */
    void updateLifeCycle() {
        if (loopParent.isFinished()) {
            lifeCycle = LifeCycle.Finished;
        }
    }
    /** Decodes a Bitmap from a drawable resource id. */
    public Bitmap loadBitmapDrawable(int drawableId) {
        Bitmap image = BitmapFactory.decodeResource(getContext().getResources(), drawableId);
        return image;
    }
    /** Creates a texture image from a drawable resource id. */
    public TextureImageBase loadImageDrawable(int drawableId) {
        Bitmap image = BitmapFactory.decodeResource(getContext().getResources(), drawableId);
        TextureImageBase result = new BitmapTextureImage(image, egl.getVRAM());
        result.setTag("drawable-" + Integer.toHexString(drawableId));
        image.recycle();
        return result;
    }
    /** Creates a texture image from a raw resource id. */
    public TextureImageBase loadImageRaw(int rawId) throws IOException {
        InputStream is = getContext().getResources().openRawResource(rawId);
        try {
            Bitmap image = BitmapFactory.decodeStream(is);
            TextureImageBase result = new BitmapTextureImage(image, egl.getVRAM());
            result.setTag("raw-" + Integer.toHexString(rawId));
            image.recycle();
            return result;
        } finally {
            is.close();
        }
    }
    /** Creates a texture image from an assets path. */
    public TextureImageBase loadImageAssets(String assetsPath) throws IOException {
        InputStream is = getContext().getAssets().open(assetsPath);
        try {
            Bitmap image = BitmapFactory.decodeStream(is);
            TextureImageBase result = new BitmapTextureImage(image, egl.getVRAM());
            result.setTag("assets-" + assetsPath);
            image.recycle();
            return result;
        } finally {
            is.close();
        }
    }
    /** Creates and returns a texture for rendering the given text. */
    public TextureImageBase createFontTexture(String text, int fontSize, int boundsAddX, int boundsAddY) {
        Paint paint = new Paint();
        paint.setTextSize(fontSize);
        paint.setStyle(Style.STROKE);
        Rect bounds = new Rect();
        // NOTE(review): getTextBounds(text, -1, -1, ...) looks wrong;
        // Android's Paint expects (start, end) indices into the string —
        // confirm this actually measures anything.
        paint.getTextBounds(text, -1, -1, bounds);
        Bitmap bitmap = Bitmap
                .createBitmap(bounds.width() + boundsAddX, bounds.height() + boundsAddY, Config.ARGB_4444);
        Canvas canvas = new Canvas(bitmap);
        canvas.drawText(text, 0, 0, paint);
        TextureImageBase result = new BitmapTextureImage(bitmap, egl.getVRAM());
        result.setTag("font-" + text + " :: " + fontSize);
        bitmap.recycle();
        return result;
    }
    /** Phase that initializes the game. */
    protected abstract void onGameInitialize();
    /** Phase that finalizes the game. */
    protected abstract void onGameFinalize();
    /** Performs per-frame processing. */
    protected abstract void onGameFrame();
    /** The game was paused. */
    protected abstract void onGamePause();
    /** The game was resumed. */
    protected abstract void onGameResume();
    /** True while paused. */
    boolean paused = true;
    /**
     * Activity#onPause
     */
    public void onPause() {
        paused = true;
    }
    public boolean isRunning() {
        return lifeCycle == LifeCycle.Running;
    }
    /**
     * Activity#onResume
     */
    public void onResume() {
        paused = false;
    }
    /** Marks the loop finished (called from Activity#onDestroy). */
    public void onDestroy() {
        lifeCycle = LifeCycle.Finished;
    }
    /** Returns true while called from the game thread. */
    public boolean isGameThread() {
        return gameHandle.isHandlerThread();
    }
    /** Returns the game-loop handler. */
    public Handler getGameHandler() {
        return gameHandle;
    }
    /** Returns the UI handler. */
    public UIHandler getUIHandler() {
        return uiHandle;
    }
    /** Exits the game by finishing the owning Activity (if any). */
    public void exit() {
        uiHandle.post(new Runnable() {
            @Override
            public void run() {
                if (context instanceof Activity) {
                    ((Activity) context).finish();
                }
            }
        });
    }
}
|
joseph-whiting/scala | test/scalacheck/range.scala | import org.scalacheck._
import Prop._
import Gen._
import Arbitrary._
/** Watches the values a Range produces during iteration, failing fast when
 *  the count exceeds what any Int Range could yield or when the sequence
 *  stops being monotonic (i.e. the range "wrapped"). */
class Counter(r: Range) {
  var cnt = 0L
  var last: Option[Int] = None
  val str = "Range[" + r.start + ", " + r.end + ", " + r.step + (if (r.isInclusive) "]" else ")")

  // Print then throw: the printed copy survives even if the exception is
  // eaten by an out-of-memory error.
  private def fail(msg: String): Nothing = {
    println(msg)
    sys error msg
  }

  def apply(x: Int) = {
    cnt += 1L
    // Periodic progress output for very long iterations.
    if (cnt % 500000000L == 0L)
      println("Working: %s %d %d" format (str, cnt, x))
    // No Int Range can produce more than 2^32 elements.
    if (cnt > (Int.MaxValue.toLong + 1) * 2)
      fail("Count exceeds maximum possible for an Int Range: %s" format str)
    // A positive step must never go down, a negative one never up.
    val wrapped =
      if (r.step > 0) last.exists(_ > x)
      else if (r.step < 0) last.exists(_ < x)
      else false
    if (wrapped)
      fail("Range %s wrapped: %d %s" format (str, x, last.toString))
    last = Some(x)
  }
}
/** ScalaCheck properties shared by every Range flavour tested below;
 *  concrete subclasses choose the generator via `myGen`. */
abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
  // Generator of the ranges under test.
  def myGen: Gen[Range]

  def genReasonableSizeRange = oneOf(genArbitraryRange, genBoundaryRange)

  // Fully arbitrary ranges; a zero step is patched to 100 since 0 is illegal.
  def genArbitraryRange = for {
    start <- choose(Int.MinValue, Int.MaxValue)
    end <- choose(Int.MinValue, Int.MaxValue)
    step <- choose(-Int.MaxValue, Int.MaxValue)
  } yield Range(start, end, if (step == 0) 100 else step)

  // Ranges whose start or end sits at an interesting Int boundary value.
  def genBoundaryRange = for {
    boundary <- oneOf(Int.MinValue, -1, 0, 1, Int.MaxValue)
    isStart <- arbitrary[Boolean]
    size <- choose(1, 100)
    step <- choose(1, 101)
  } yield {
    val signum = if (boundary == 0) 1 else boundary.signum
    if (isStart) Range(boundary, boundary - size * boundary.signum, - step * signum)
    else Range(boundary - size * boundary.signum, boundary, step * signum)
  }

  // Small ranges in [-100, 100] stepping by exactly one.
  def genSmallRange = for {
    start <- choose(-100, 100)
    end <- choose(-100, 100)
    step <- choose(1, 1)
  } yield if (start < end) Range(start, end, step) else Range(start, end, -step)

  def genRangeByOne = oneOf(genRangeOpenByOne, genRangeClosedByOne)

  // Step-one exclusive ranges capped at ten million elements.
  def genRangeOpenByOne = for {
    r <- oneOf(genSmallRange, genBoundaryRange)
    if (r.end.toLong - r.start.toLong).abs <= 10000000L
  } yield if (r.start < r.end) Range(r.start, r.end) else Range(r.end, r.start)

  def genRangeClosedByOne = for (r <- genRangeOpenByOne) yield r.start to r.end

  // Compact printable form used for labelling failed properties.
  def str(r: Range) = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")

  // Reference size computation done in Long arithmetic to dodge Int overflow.
  def expectedSize(r: Range): Long = if (r.isInclusive) {
    (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
      case (true, true) | (false, false) => (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong + 1L
      case _ => if (r.start == r.end) 1L else 0L
    }
  } else {
    (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
      case (true, true) | (false, false) => (
        (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong
        + (if ((r.end.toLong - r.start.toLong).abs % r.step.abs.toLong > 0L) 1L else 0L)
      )
      case _ => 0L
    }
  }

  // True when x lies between start and end, respecting direction/inclusivity.
  def within(r: Range, x: Int) = if (r.step > 0)
    r.start <= x && (if (r.isInclusive) x <= r.end else x < r.end)
  else
    r.start >= x && (if (r.isInclusive) x >= r.end else x > r.end)

  // True when x is reachable from start in a whole number of steps.
  def multiple(r: Range, x: Int) = (x.toLong - r.start) % r.step == 0

  // Every visited element is a whole number of steps from start.
  property("foreach.step") = forAllNoShrink(myGen) { r =>
    // println("foreach.step "+str(r))
    var allValid = true
    val cnt = new Counter(r)
    // println("--------------------")
    // println(r)
    r foreach { x => cnt(x)
      // println(x + ", " + (x - r.start) + ", " + (x.toLong - r.start) + ", " + ((x.toLong - r.start) % r.step))
      allValid &&= multiple(r, x)
    }
    allValid :| str(r)
  }

  // Every visited element lies inside the range's bounds.
  property("foreach.inside.range") = forAll(myGen) { r =>
    // println("foreach.inside.range "+str(r))
    var allValid = true
    var last: Option[Int] = None
    val cnt = new Counter(r)
    r foreach { x => cnt(x)
      allValid &&= within(r, x)
    }
    allValid :| str(r)
  }

  // foreach visits exactly expectedSize(r) elements.
  property("foreach.visited.size") = forAll(myGen) { r =>
    // println("foreach.visited.size "+str(r))
    var visited = 0L
    val cnt = new Counter(r)
    r foreach { x => cnt(x)
      visited += 1L
    }
    // println("----------")
    // println(str(r))
    // println("size: " + r.size)
    // println("expected: " + expectedSize(r))
    // println("visited: " + visited)
    (visited == expectedSize(r)) :| str(r)
  }

  // Range#sum agrees with an independently computed sum.
  property("sum") = forAll(myGen) { r =>
    // println("----------")
    // println("sum "+str(r))
    val rSum = r.sum
    val expected = r.length match {
      case 0 => 0
      case 1 => r.head
      case x if x < 1000 =>
        // Explicit sum, to guard against having the same mistake in both the
        // range implementation and test implementation of sum formula.
        // (Yes, this happened before.)
        var i = r.head
        var s = 0L
        var n = x
        while (n > 0) {
          s += i
          i += r.step
          n -= 1
        }
        s.toInt
      case _ =>
        // Make sure head + last doesn't overflow!
        ((r.head.toLong + r.last) * r.length / 2).toInt
    }
    // println("size: " + r.length)
    // println("expected: " + expected)
    // println("obtained: " + rSum)
    (rSum == expected) :| str(r)
  }

  /* checks that sum respects custom Numeric */
  property("sumCustomNumeric") = forAll(myGen) { r =>
    val mod = 65536
    // Numeric implementing only the operations sum needs (plus/zero/toInt),
    // working modulo 65536; everything else is deliberately unimplemented.
    object mynum extends Numeric[Int] {
      def plus(x: Int, y: Int): Int = (x + y) % mod
      override def zero = 0
      def fromInt(x: Int): Int = ???
      def parseString(str: String) = ???
      def minus(x: Int, y: Int): Int = ???
      def negate(x: Int): Int = ???
      def times(x: Int, y: Int): Int = ???
      def toDouble(x: Int): Double = ???
      def toFloat(x: Int): Float = ???
      def toInt(x: Int): Int = ((x % mod) + mod * 2) % mod
      def toLong(x: Int): Long = ???
      def compare(x: Int, y: Int): Int = ???
    }
    val rSum = r.sum(mynum)
    val expected = mynum.toInt(r.sum)
    (rSum == expected) :| str(r)
  }

  // length matches the reference computation (sizes restricted to Int range).
  property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
    // println("length "+str(r))
    (r.length == expectedSize(r)) :| str(r)
  }

  property("isEmpty") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
    // println("isEmpty "+str(r))
    (r.isEmpty == (expectedSize(r) == 0L)) :| str(r)
  }

  // contains(x) iff x is both within bounds and a whole number of steps away.
  property("contains") = forAll(myGen, arbInt.arbitrary) { (r, x) =>
    // println("contains "+str(r))
    // println("----------------")
    // println(str(r))
    // println(x)
    // println("within: " + within(r, x))
    // println("multiple: " + multiple(r, x))
    // println("contains: " + r.contains(x))
    ((within(r, x) && multiple(r, x)) == r.contains(x)) :| str(r)+": "+x
  }

  // take preserves start and step, clamping the size to [0, r.size].
  property("take") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
    // println("take "+str(r))
    val t = r take x
    (t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x
  }

  // init drops exactly one element off the end (for non-empty ranges).
  property("init") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
    // println("init "+str(r))
    (r.size == 0) || {
      val t = r.init
      (t.size + 1 == r.size) && (t.isEmpty || t.head == r.head)
    }
  }

  // takeWhile against a threshold matches the equivalent clamped range.
  property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
    // println("takeWhile "+str(r))
    val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x))
    if (r.size == 0) {
      (t.size == 0) :| str(r)+" / "+str(t)+": "+x
    } else {
      val t2 = (if (r.step > 0) Range(r.start, x min r.last, r.step).inclusive else Range(r.start, x max r.last, r.step).inclusive)
      (t.start == r.start && t.size == t2.size && t.step == r.step) :| str(r)+" / "+str(t)+" / "+str(t2)+": "+x
    }
  }

  // tails/inits/grouped agree with the List implementations.
  property("tails") = forAll(myGen) { r =>
    r.tails.toList == r.toList.tails.toList
  }

  property("inits") = forAll(myGen) { r =>
    r.inits.toList == r.toList.inits.toList
  }

  // Reversing a range must not gain or lose elements.
  property("reverse.toSet.equal") = forAll(myGen) { r =>
    // println("reverse.toSet.equal "+str(r))
    val reversed = r.reverse
    val aresame = r.toSet == reversed.toSet
    if (!aresame) {
      println(str(r))
      println(r)
      println(reversed)
      println(r.toSet)
      println(reversed.toSet)
    }
    aresame :| str(r)
  }

  property("grouped") = forAllNoShrink(
    myGen,
    Gen.posNum[Int],
  ) { (r, size) =>
    r.grouped(size).toSeq == r.toList.grouped(size).toSeq
  }
}
/** Range properties run over arbitrary and boundary-valued ranges. */
object NormalRangeTest extends RangeTest("normal") {
  override def myGen = genReasonableSizeRange

  // Step-one ranges whose span fits in an Int.
  def genOne = for {
    start <- arbitrary[Int]
    end <- arbitrary[Int]
    if (start.toLong - end.toLong).abs < Int.MaxValue.toLong
  } yield Range(start, end, if (start < end) 1 else - 1)

  // An exclusive step-one range is exactly one shorter than its inclusive twin.
  property("by 1.size + 1 == inclusive.size") = forAll(genOne) { r =>
    (r.size + 1 == r.inclusive.size) :| str(r)
  }
}
/** Same properties, but with every generated range made inclusive. */
object InclusiveRangeTest extends RangeTest("inclusive") {
  override def myGen = for (r <- genReasonableSizeRange) yield r.inclusive
}
/** Same properties restricted to step-one ranges. */
object ByOneRangeTest extends RangeTest("byOne") {
  override def myGen = genRangeByOne
}
/** Same properties restricted to inclusive step-one ranges. */
object InclusiveByOneRangeTest extends RangeTest("inclusiveByOne") {
  override def myGen = for (r <- genRangeByOne) yield r.inclusive
}
/** Same properties restricted to small ranges in [-100, 100]. */
object SmallValuesRange extends RangeTest("smallValues") {
  override def myGen = genSmallRange
}
/** Verifies that a range with more than Int.MaxValue elements refuses to
 *  report a length and instead throws IllegalArgumentException. */
object TooLargeRange extends Properties("Too Large Range") {
  // Any start <= 0 makes Range.inclusive(start, Int.MaxValue, 1) longer than
  // Int.MaxValue elements.  The original wrote `choose(-Int.MinValue, 0)`;
  // -Int.MinValue silently overflows back to Int.MinValue, so spell the
  // intended bound directly instead of relying on that wraparound.
  val genTooLargeStart = for {
    start <- choose(Int.MinValue, 0)
  } yield start

  property("Too large range throws exception") = forAll(genTooLargeStart) { start =>
    try {
      val r = Range.inclusive(start, Int.MaxValue, 1)
      val l = r.length
      println("how here? length = " + l + ", r = " + r.toString)
      false
    }
    catch { case _: IllegalArgumentException => true }
  }
}
/* Mini-benchmark
def testRange(i: Int, j: Int, k: Int) = {
var count = 0
for {
vi <- 0 to i
vj <- 0 to j
vk <- 0 to k
} { count += 1 }
}
testRange(10, 1000, 10000)
testRange(10000, 1000, 10)
*/
|
roman-sd/java-a-to-z | chapter_011/springMVC/src/main/java/ru/sdroman/carstore/models/OrderDTO.java | package ru.sdroman.carstore.models;
/**
 * Flat data-transfer object describing a car sale order: price, year,
 * free-form description and the ids of the car's component entities
 * (body, engine, model, transmission, drive type).
 *
 * @author sdroman
 * @since 08.2018
 */
public class OrderDTO {
    /** Order id. */
    private int id;
    /** Free-form description. */
    private String description;
    /** Price. */
    private int price;
    /** Year (stored as text). */
    private String year;
    /** Car body id. */
    private int bodyId;
    /** Car engine id. */
    private int engineId;
    /** Car model id. */
    private int modelId;
    /** Car transmission id. */
    private int transmissionId;
    /** Car drive-type id. */
    private int driveTypeId;

    /**
     * Returns order id.
     *
     * @return int
     */
    public int getId() {
        return id;
    }

    /**
     * Sets order id.
     *
     * @param id int
     */
    public void setId(int id) {
        this.id = id;
    }

    /**
     * Returns description.
     *
     * @return String
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets description.
     *
     * @param description String
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Returns price.
     *
     * @return int
     */
    public int getPrice() {
        return price;
    }

    /**
     * Sets price.
     *
     * @param price int
     */
    public void setPrice(int price) {
        this.price = price;
    }

    /**
     * Returns year.
     *
     * @return String
     */
    public String getYear() {
        return year;
    }

    /**
     * Sets year.
     *
     * @param year String
     */
    public void setYear(String year) {
        this.year = year;
    }

    /**
     * Returns car body id.
     *
     * @return int
     */
    public int getBodyId() {
        return bodyId;
    }

    /**
     * Sets car body id.
     *
     * @param bodyId int
     */
    public void setBodyId(int bodyId) {
        this.bodyId = bodyId;
    }

    /**
     * Returns car engine id.
     *
     * @return int
     */
    public int getEngineId() {
        return engineId;
    }

    /**
     * Sets car engine id.
     *
     * @param engineId int
     */
    public void setEngineId(int engineId) {
        this.engineId = engineId;
    }

    /**
     * Returns car model id.
     *
     * @return int
     */
    public int getModelId() {
        return modelId;
    }

    /**
     * Sets car model id.
     *
     * @param modelId int
     */
    public void setModelId(int modelId) {
        this.modelId = modelId;
    }

    /**
     * Returns car transmission id.
     *
     * @return int
     */
    public int getTransmissionId() {
        return transmissionId;
    }

    /**
     * Sets car transmission id.
     *
     * @param transmissionId int
     */
    public void setTransmissionId(int transmissionId) {
        this.transmissionId = transmissionId;
    }

    /**
     * Returns drive type id.
     *
     * @return int
     */
    public int getDriveTypeId() {
        return driveTypeId;
    }

    /**
     * Sets car driveType id.
     *
     * @param driveTypeId int
     */
    public void setDriveTypeId(int driveTypeId) {
        this.driveTypeId = driveTypeId;
    }
}
|
gvlproject/moto | tests/test_ec2/test_customer_gateways.py | from __future__ import unicode_literals
import boto
import sure # noqa
from nose.tools import assert_raises
from nose.tools import assert_false
from boto.exception import EC2ResponseError
from moto import mock_ec2_deprecated
@mock_ec2_deprecated
def test_create_customer_gateways():
    """A freshly created customer gateway reports the attributes it was given."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    cgw = vpc_conn.create_customer_gateway('ipsec.1', '172.16.17.32', 65534)

    cgw.should_not.be.none
    cgw.id.should.match(r'cgw-\w+')
    cgw.type.should.equal('ipsec.1')
    cgw.bgp_asn.should.equal(65534)
    cgw.ip_address.should.equal('172.16.17.32')
@mock_ec2_deprecated
def test_describe_customer_gateways():
    """get_all_customer_gateways lists the single gateway just created."""
    vpc_conn = boto.connect_vpc('the_key', 'the_secret')
    cgw = vpc_conn.create_customer_gateway('ipsec.1', '172.16.17.32', 65534)

    listed = vpc_conn.get_all_customer_gateways()
    listed.should.have.length_of(1)
    listed[0].id.should.match(cgw.id)
@mock_ec2_deprecated
def test_delete_customer_gateways():
    """Deleting a customer gateway removes it from subsequent describe calls."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    customer_gateway = conn.create_customer_gateway(
        'ipsec.1', '172.16.17.32', 65534)
    customer_gateway.should_not.be.none

    cgws = conn.get_all_customer_gateways()
    cgws[0].id.should.match(customer_gateway.id)

    # The observable contract is that the gateway disappears from the
    # describe call; the boolean return value was previously bound to an
    # unused local, which is dropped here.
    conn.delete_customer_gateway(customer_gateway.id)

    cgws = conn.get_all_customer_gateways()
    cgws.should.have.length_of(0)
@mock_ec2_deprecated
def test_delete_customer_gateways_bad_id():
    """Deleting a nonexistent gateway id raises EC2ResponseError."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    # The context-manager result was previously bound to an unused `cm`;
    # only the raised exception type is asserted here.
    with assert_raises(EC2ResponseError):
        conn.delete_customer_gateway('cgw-0123abcd')
|
mimeyy/dylansbang | app/src/main/java/com/threerings/jme/tile/TileFringer.java | <reponame>mimeyy/dylansbang
//
// $Id$
//
// Nenya library - tools for developing networked games
// Copyright (C) 2002-2010 Three Rings Design, Inc., All Rights Reserved
// http://code.google.com/p/nenya/
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.threerings.jme.tile;
import java.awt.Graphics2D;
import java.awt.Transparency;
import java.awt.image.BufferedImage;
import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import com.samskivert.util.QuickSort;
import com.threerings.media.image.ImageUtil;
import com.threerings.media.tile.TileUtil;
import static com.threerings.jme.Log.log;
/**
* Computes fringe tile images according to the rules in an associated
* fringe configuration.
*/
public class TileFringer
{
    /** Supplies the tile types of a 2D tile grid to the fringer. */
    public static interface TileSource
    {
        /** Returns the type of tile at the specified coordinates or null
         * if there is no tile at this coordinate. */
        public String getTileType (int x, int y);

        /** Returns the tile type to use when a coordinate has no tile. */
        public String getDefaultType ();
    }
    /** Supplies tile and fringe imagery, and scratch images for compositing. */
    public static interface ImageSource extends ImageUtil.ImageCreator
    {
        /** Creates a blank image into which various fringe images will be
         * composited. */
        public BufferedImage createImage (
            int width, int height, int transparency);

        /** Returns the source image for a tile of the specified type.
         * This can be randomly selected and change from call to call. */
        public BufferedImage getTileSource (String type);

        /** Returns the named fringe source image (one long strip). */
        public BufferedImage getFringeSource (String name);
    }
    /**
     * Creates a fringer that will fringe according to the rules in the
     * supplied configuration.
     *
     * @param config the fringe rules (which tile types fringe on which).
     * @param isrc supplies tile/fringe images and scratch image creation.
     */
    public TileFringer (FringeConfiguration config, ImageSource isrc)
    {
        _config = config;
        _isrc = isrc;
    }
    /**
     * Computes, creates and returns the base tile with the appropriate
     * fringe imagery applied to it for the specified location, or null if
     * no neighbor fringes on this tile.
     *
     * @param tiles supplies the tile types of the grid.
     * @param col the column of the tile being fringed.
     * @param row the row of the tile being fringed.
     * @param masks used to cache intermediate images of tiles cut out
     * using a fringe mask.
     */
    public BufferedImage getFringeTile (
        TileSource tiles, int col, int row, Map<String, BufferedImage> masks)
    {
        // get the type of the tile we are considering
        String baseType = tiles.getTileType(col, row);
        if (baseType == null) {
            baseType = tiles.getDefaultType();
        }

        // start with an empty fringer list
        FringerRec fringers = null;

        // walk through our influence tiles (the 3x3 neighborhood)
        for (int y = row - 1, maxy = row + 2; y < maxy; y++) {
            for (int x = col - 1, maxx = col + 2; x < maxx; x++) {
                // we sensibly do not consider ourselves
                if ((x == col) && (y == row)) {
                    continue;
                }

                // determine the type of our fringing neighbor
                String fringerType = tiles.getTileType(x, y);
                if (fringerType == null) {
                    fringerType = tiles.getDefaultType();
                }

                // determine if it fringes on our tile
                int pri = _config.fringesOn(fringerType, baseType);
                if (pri == -1) {
                    continue;
                }

                // reuse the record for this neighbor type, or prepend a new
                // one to the singly-linked fringer list
                FringerRec fringer = (fringers == null) ?
                    null : fringers.find(fringerType);
                if (fringer == null) {
                    fringer = fringers =
                        new FringerRec(fringerType, pri, fringers);
                }

                // now turn on the appropriate fringebits; dx,dy are in
                // [-1,1], so dy*3+dx+4 indexes the 3x3 FLAGMATRIX in [0,8]
                int dy = y - row, dx = x - col;
                fringer.bits |= FLAGMATRIX[dy*3+dx+4];
            }
        }

        // if nothing fringed, we're done
        if (fringers == null) {
            return null;
        }

        // otherwise compose a fringe tile from the specified fringes
        return composeFringeTile(
            baseType, fringers.toArray(), masks, TileUtil.getTileHash(col, row));
    }
    /**
     * Compose a fringe tile out of the various fringe images needed: the
     * base tile image with each fringer's tiles stamped on top, higher
     * priority fringers first.  Returns null if the base tile image is
     * missing.
     */
    protected BufferedImage composeFringeTile (
        String baseType, FringerRec[] fringers, Map<String, BufferedImage> masks, int hashValue)
    {
        // sort the array so that higher priority fringers get drawn first
        QuickSort.sort(fringers);

        BufferedImage source = _isrc.getTileSource(baseType);
        if (source == null) {
            log.warning("Missing source tile [type=" + baseType + "].");
            return null;
        }

        BufferedImage ftimg = _isrc.createImage(
            source.getWidth(), source.getHeight(), Transparency.OPAQUE);
        Graphics2D gfx = (Graphics2D)ftimg.getGraphics();
        try {
            // start with the base tile image
            gfx.drawImage(source, 0, 0, null);

            // and stamp the fringers on top of it
            for (int ii = 0; ii < fringers.length; ii++) {
                int[] indexes = getFringeIndexes(fringers[ii].bits);
                for (int jj = 0; jj < indexes.length; jj++) {
                    stampTileImage(gfx, fringers[ii].fringerType,
                                   indexes[jj], masks, hashValue);
                }
            }
        } finally {
            // always release the graphics context
            gfx.dispose();
        }

        return ftimg;
    }
/**
 * Looks up or creates the appropriate fringe mask and draws it into
 * the supplied graphics context.
 *
 * @param gfx the graphics context to draw into.
 * @param fringerType the type of the fringing tile.
 * @param index the index of the fringe image within the fringe strip.
 * @param masks cache of composed mask images, updated as a side effect.
 * @param hashValue tile hash used to choose among alternate fringe
 * records for this type.
 */
protected void stampTileImage (
    Graphics2D gfx, String fringerType, int index,
    Map<String, BufferedImage> masks, int hashValue)
{
    // look up the fringe record and the strip image it names
    FringeConfiguration.FringeRecord frec =
        _config.getFringe(fringerType, hashValue);
    BufferedImage fsimg = (frec == null) ? null :
        _isrc.getFringeSource(frec.name);
    if (fsimg == null) {
        log.warning("Missing fringe source image [type=" + fringerType +
            ", hash=" + hashValue + ", frec=" + frec + "].");
        return;
    }

    if (frec.mask) {
        // it's a mask; look for it in the cache
        String maskkey = fringerType + ":" + frec.name + ":" + index;
        BufferedImage mimg = masks.get(maskkey);
        if (mimg == null) {
            // compose the mask with the fringer's own base tile and
            // remember the result for subsequent tiles
            BufferedImage fsrc = getSubimage(fsimg, index);
            BufferedImage bsrc = _isrc.getTileSource(fringerType);
            mimg = ImageUtil.composeMaskedImage(_isrc, fsrc, bsrc);
            masks.put(maskkey, mimg);
        }
        gfx.drawImage(mimg, 0, 0, null);

    } else {
        // this is a non-mask image so just use the data from the
        // fringe source image directly
        gfx.drawImage(getSubimage(fsimg, index), 0, 0, null);
    }
}
/**
 * Extracts the <code>index</code>th square tile from the supplied
 * strip image. The strip is assumed to be a single horizontal row of
 * tiles whose width and height both equal the strip's height.
 */
protected BufferedImage getSubimage (BufferedImage source, int index)
{
    int tileSize = source.getHeight();
    return source.getSubimage(tileSize * index, 0, tileSize, tileSize);
}
/**
 * Get the fringe index specified by the fringebits. If no index
 * is available, try breaking down the bits into contiguous regions of
 * bits and look for indexes for those.
 *
 * @return an array of indexes into the fringe strip; possibly empty,
 * never null.
 */
protected int[] getFringeIndexes (int bits)
{
    // fast path: the exact bit combination maps directly to one tile
    int index = BITS_TO_INDEX[bits];
    if (index != -1) {
        int[] ret = new int[1];
        ret[0] = index;
        return ret;
    }

    // otherwise, split the bits into contiguous components

    // look for a zero and start our first split there so that a run
    // wrapping from bit 7 back to bit 0 is treated as one region
    int start = 0;
    while ((((1 << start) & bits) != 0) && (start < NUM_FRINGEBITS)) {
        start++;
    }

    if (start == NUM_FRINGEBITS) {
        // we never found an empty fringebit, and since index (above)
        // was already -1, we have no fringe tile for these bits.. sad.
        return new int[0];
    }

    List<Integer> indexes = Lists.newArrayList();
    int weebits = 0;
    // walk all bits exactly once starting just past the known-zero
    // bit; each zero bit flushes the run accumulated in weebits
    for (int ii=(start + 1) % NUM_FRINGEBITS; ii != start;
         ii = (ii + 1) % NUM_FRINGEBITS) {
        if (((1 << ii) & bits) != 0) {
            weebits |= (1 << ii);
        } else if (weebits != 0) {
            index = BITS_TO_INDEX[weebits];
            if (index != -1) {
                indexes.add(Integer.valueOf(index));
            }
            weebits = 0;
        }
    }
    // flush the final run, which ends at the bit just before 'start'
    // (the loop exits before re-examining 'start', which is zero)
    if (weebits != 0) {
        index = BITS_TO_INDEX[weebits];
        if (index != -1) {
            indexes.add(Integer.valueOf(index));
        }
    }

    int[] ret = new int[indexes.size()];
    for (int ii=0; ii < ret.length; ii++) {
        ret[ii] = indexes.get(ii).intValue();
    }
    return ret;
}
/** Accumulates state about one fringing tile type while we compute
 * what the final fringe will look like. Records chain into a simple
 * singly-linked list via {@link #next}. Sorts by ascending priority. */
protected static class FringerRec implements Comparable<FringerRec>
{
    public String fringerType;
    public int priority;
    public int bits;
    public FringerRec next;

    public FringerRec (String type, int pri, FringerRec next) {
        fringerType = type;
        priority = pri;
        this.next = next;
    }

    /** Walks the list starting at this record, returning the record
     * with the given type or null if none matches. */
    public FringerRec find (String type)
    {
        for (FringerRec rec = this; rec != null; rec = rec.next) {
            if (rec.fringerType.equals(type)) {
                return rec;
            }
        }
        return null;
    }

    /** Converts the list rooted at this record into an array,
     * preserving list order. */
    public FringerRec[] toArray ()
    {
        return toArray(0);
    }

    // from interface Comparable<FringerRec>
    public int compareTo (FringerRec o) {
        return priority - o.priority;
    }

    public String toString () {
        return "[type=" + fringerType + ", pri=" + priority +
            ", bits=" + Integer.toString(bits, 16) + "]";
    }

    /** Recursive helper: allocates an array big enough for the rest of
     * the list and slots this record at position <code>pos</code>. */
    protected FringerRec[] toArray (int pos)
    {
        FringerRec[] array = (next == null) ?
            new FringerRec[pos + 1] : next.toArray(pos + 1);
        array[pos] = this;
        return array;
    }
}
// One bit per compass direction; together they describe which edges
// and corners of a tile are fringed upon by neighbors.
protected static final int NORTH = 1 << 0;
protected static final int NORTHEAST = 1 << 1;
protected static final int EAST = 1 << 2;
protected static final int SOUTHEAST = 1 << 3;
protected static final int SOUTH = 1 << 4;
protected static final int SOUTHWEST = 1 << 5;
protected static final int WEST = 1 << 6;
protected static final int NORTHWEST = 1 << 7;

/** The number of direction bits defined above. */
protected static final int NUM_FRINGEBITS = 8;

/** A matrix mapping adjacent tiles to which fringe bits they affect.
 * (x and y are offset by +1, since we can't have -1 as an array index).
 * These are "upside down" thanks to OpenGL. */
protected static final int[] FLAGMATRIX = {
    SOUTHWEST, (SOUTHEAST | SOUTH | SOUTHWEST), SOUTHEAST,
    (NORTHWEST | WEST | SOUTHWEST), 0, (NORTHEAST | EAST | SOUTHEAST),
    NORTHWEST, (NORTHWEST | NORTH | NORTHEAST), NORTHEAST,
};

/** The fringe tiles we use. These are the 17 possible tiles made up
 * of continuous fringebits sections. Array position doubles as the
 * image index within a fringe strip. */
protected static final int[] FRINGETILES = {
    SOUTHEAST,
    SOUTHWEST | SOUTH | SOUTHEAST,
    SOUTHWEST,
    NORTHEAST | EAST | SOUTHEAST,
    NORTHWEST | WEST | SOUTHWEST,
    NORTHEAST,
    NORTHWEST | NORTH | NORTHEAST,
    NORTHWEST,
    SOUTHWEST | WEST | NORTHWEST | NORTH | NORTHEAST,
    NORTHWEST | NORTH | NORTHEAST | EAST | SOUTHEAST,
    NORTHWEST | WEST | SOUTHWEST | SOUTH | SOUTHEAST,
    SOUTHWEST | SOUTH | SOUTHEAST | EAST | NORTHEAST,
    NORTHEAST | NORTH | NORTHWEST | WEST | SOUTHWEST | SOUTH | SOUTHEAST,
    SOUTHEAST | EAST | NORTHEAST | NORTH | NORTHWEST | WEST | SOUTHWEST,
    SOUTHWEST | SOUTH | SOUTHEAST | EAST | NORTHEAST | NORTH | NORTHWEST,
    NORTHWEST | WEST | SOUTHWEST | SOUTH | SOUTHEAST | EAST | NORTHEAST,
    // all the directions!
    NORTH | NORTHEAST | EAST | SOUTHEAST | SOUTH | SOUTHWEST |
    WEST | NORTHWEST
};

/** A reverse map of the {@link #FRINGETILES} array, for quickly
 * looking up which tile we want. A value of -1 means no single tile
 * covers that bit combination. */
protected static final int[] BITS_TO_INDEX;

// Construct the BITS_TO_INDEX array.
static {
    int num = (1 << NUM_FRINGEBITS);
    BITS_TO_INDEX = new int[num];
    // first clear everything to -1 (meaning there is no tile defined)
    for (int ii=0; ii < num; ii++) {
        BITS_TO_INDEX[ii] = -1;
    }
    // then fill in with the defined tiles.
    for (int ii=0; ii < FRINGETILES.length; ii++) {
        BITS_TO_INDEX[FRINGETILES[ii]] = ii;
    }
}
/** Provides tile and fringe source images (getTileSource,
 * getFringeSource) and image creation (createImage). */
protected ImageSource _isrc;

/** Decides whether one type fringes on another (fringesOn) and which
 * fringe record applies for a given tile hash (getFringe). */
protected FringeConfiguration _config;
}
|
dbrower/bendo | server/db_mysql.go | <reponame>dbrower/bendo
package server
import (
"bytes"
"database/sql"
"encoding/json"
"log"
"strings"
"time"
// no _ in import mysql since we need mysql.NullTime
"github.com/BurntSushi/migration"
raven "github.com/getsentry/raven-go"
"github.com/go-sql-driver/mysql"
"github.com/ndlib/bendo/items"
)
// This file contains code implementing various caching interfaces to use
// MySQL as a storage medium.

// MsqlCache implements the items.ItemCache interface and the FixityDB interface
// using MySQL as the backing store.
type MsqlCache struct {
	db *sql.DB // underlying connection pool, opened by NewMysqlCache
}

// Compile-time checks that MsqlCache satisfies the interfaces it claims.
var _ items.ItemCache = &MsqlCache{}
var _ FixityDB = &MsqlCache{}
var _ BlobDB = &MsqlCache{}
// List of migrations to perform. Add new ones to the end.
// DO NOT change the order of items already in this list.
var mysqlMigrations = []migration.Migrator{
	mysqlschema1,
	mysqlschema2,
	mysqlschema3,
	mysqlschema4,
}

// Adapt the schema versioning for MySQL. The migration_version table
// tracks which entries of mysqlMigrations have already been applied.
var mysqlVersioning = dbVersion{
	GetSQL:    `SELECT max(version) FROM migration_version`,
	SetSQL:    `INSERT INTO migration_version (version, applied) VALUES (?, now())`,
	CreateSQL: `CREATE TABLE migration_version (version INTEGER, applied datetime)`,
}
// NewMysqlCache connects to a MySQL database and returns an item satisfying
// both the ItemCache and FixityDB interfaces. Pending schema migrations
// are applied as part of opening the connection; an error opening the
// database or migrating the schema is logged and returned.
func NewMysqlCache(dial string) (*MsqlCache, error) {
	db, err := migration.OpenWith(
		"mysql",
		dial,
		mysqlMigrations,
		mysqlVersioning.Get,
		mysqlVersioning.Set)
	if err != nil {
		log.Println("Open Mysql", err)
		return nil, err
	}
	return &MsqlCache{db: db}, nil
}
// Lookup returns a cached Item, if one exists in the database.
// Otherwise it returns nil. Database and JSON decoding errors are
// logged and reported to sentry, then treated as cache misses.
func (ms *MsqlCache) Lookup(id string) *items.Item {
	const dbLookup = `SELECT value FROM items WHERE item = ? LIMIT 1`
	var value string
	err := ms.db.QueryRow(dbLookup, id).Scan(&value)
	if err != nil {
		if err != sql.ErrNoRows {
			// some kind of error...treat it as a miss
			log.Println("Item Cache: ", err)
			raven.CaptureError(err, nil)
		}
		return nil
	}
	// unserialize the json string
	var thisItem = new(items.Item)
	err = json.Unmarshal([]byte(value), thisItem)
	if err != nil {
		log.Println("Item Cache: error in lookup:", err)
		raven.CaptureError(err, nil)
		return nil
	}
	return thisItem
}
// Set adds the given item to the cache under the key id and then
// refreshes the blob/version/slot index tables for the item. The
// ItemCache interface gives no way to return errors, so all failures
// are logged (and reported to sentry) instead.
func (ms *MsqlCache) Set(id string, thisItem *items.Item) {
	var created, modified time.Time
	var size int64
	// total item size is the sum of its blob sizes
	for i := range thisItem.Blobs {
		size += thisItem.Blobs[i].Size
	}
	// creation/modification times come from the first and last versions
	if len(thisItem.Versions) > 0 {
		created = thisItem.Versions[0].SaveDate
		modified = thisItem.Versions[len(thisItem.Versions)-1].SaveDate
	}
	value, err := json.Marshal(thisItem)
	if err != nil {
		log.Println("Item Cache:", err)
		raven.CaptureError(err, nil)
		return
	}
	const stmt = `INSERT INTO items (item, created, modified, size, value) VALUES (?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE created=?, modified=?, size=?, value=?`
	_, err = ms.db.Exec(stmt, id, created, modified, size, value, created, modified, size, value)
	if err != nil {
		// FIX: this error was only printed before; also report it to
		// sentry like every other database error in this file.
		log.Println("Item Cache:", err)
		raven.CaptureError(err, nil)
		return
	}
	// FIX: IndexItem's error was silently discarded; surface it.
	if err = ms.IndexItem(id, thisItem); err != nil {
		log.Println("Item Cache: IndexItem:", err)
		raven.CaptureError(err, nil)
	}
}
// FindBlob returns the indexed blob record for the given item and blob
// id, or (nil, nil) when no such row exists. The created and deleted
// timestamps are nullable columns, so they are only copied into the
// result when present.
func (ms *MsqlCache) FindBlob(item string, blobid int) (*items.Blob, error) {
	const query = `
		SELECT size, bundle, created, creator, MD5, SHA256, mimetype,
			deleted, deleter, deletenote
		FROM blobs
		WHERE item = ? AND blobid = ?
		LIMIT 1`
	var b items.Blob
	var dDeleted mysql.NullTime
	var dSave mysql.NullTime
	err := ms.db.QueryRow(query, item, blobid).Scan(&b.Size, &b.Bundle, &dSave, &b.Creator, &b.MD5, &b.SHA256, &b.MimeType, &dDeleted, &b.Deleter, &b.DeleteNote)
	// the blob id is taken from the argument, not the query
	b.ID = items.BlobID(blobid)
	if dSave.Valid {
		b.SaveDate = dSave.Time
	}
	if dDeleted.Valid {
		b.DeleteDate = dDeleted.Time
	}
	if err == sql.ErrNoRows {
		return nil, nil
	}
	return &b, err
}
// getMaxBlob returns the largest blob id currently indexed for the
// given item, or 0 when the item has no blob rows.
func (ms *MsqlCache) getMaxBlob(item string) (int, error) {
	const query = `
		SELECT max(blobid)
		FROM blobs
		WHERE item = ?`
	var maxID sql.NullInt64
	err := ms.db.QueryRow(query, item).Scan(&maxID)
	if err == sql.ErrNoRows {
		// an empty result is not an error
		err = nil
	}
	if !maxID.Valid {
		return 0, err
	}
	return int(maxID.Int64), err
}
// getMaxVersion returns the largest version id currently indexed for
// the given item, or 0 when the item has no version rows.
func (ms *MsqlCache) getMaxVersion(item string) (int, error) {
	const maxversion = `
		SELECT max(versionid)
		FROM versions
		WHERE item = ?`
	var version sql.NullInt64
	err := ms.db.QueryRow(maxversion, item).Scan(&version)
	if err == sql.ErrNoRows {
		// an empty result is not an error
		err = nil
	}
	if version.Valid {
		return int(version.Int64), err
	}
	// FIX: this previously returned "0, nil", silently discarding any
	// query error; propagate it like getMaxBlob does.
	return 0, err
}
// FindBlobBySlot resolves a slot name in the given version of an item
// to its blob record. A version of 0 means "the most recent version".
// Returns (nil, nil) when the item, version, or slot does not exist.
func (ms *MsqlCache) FindBlobBySlot(item string, version int, slot string) (*items.Blob, error) {
	if version == 0 {
		var err error
		version, err = ms.getMaxVersion(item)
		if err != nil || version == 0 {
			return nil, err
		}
	}
	// we do the resolution in two steps for simplicity
	const query = `
		SELECT blobid
		FROM slots
		WHERE item = ? AND versionid = ? AND name = ?
		LIMIT 1`
	var bid int
	err := ms.db.QueryRow(query, item, version, slot).Scan(&bid)
	// FIX: check for real errors before treating bid == 0 as "not
	// found". The previous order (bid == 0 || ErrNoRows first) hid
	// genuine query errors, since a failed Scan leaves bid at 0.
	if err == sql.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}
	if bid == 0 {
		return nil, nil
	}
	return ms.FindBlob(item, bid)
}
// GetItemList returns up to pagesize item summaries starting at offset,
// ordered according to sortorder (see buildItemListQuery). Scan errors
// on individual rows are logged and the row skipped; query and
// iteration errors are logged and whatever was read is returned.
func (ms *MsqlCache) GetItemList(offset int, pagesize int, sortorder string) ([]SimpleItem, error) {
	query, args := buildItemListQuery(offset, pagesize, sortorder)
	var results []SimpleItem
	rows, err := ms.db.Query(query, args...)
	if err == sql.ErrNoRows {
		// no next record
		return results, nil
	} else if err != nil {
		log.Println("GetItemList Query MySQL", err)
		raven.CaptureError(err, nil)
		return results, nil
	}
	defer rows.Close()
	for rows.Next() {
		var rec = SimpleItem{}
		var created mysql.NullTime
		var modified mysql.NullTime
		err = rows.Scan(&rec.ID, &created, &modified, &rec.Size)
		if err != nil {
			log.Println("GetItemList Scan MySQL", err)
			raven.CaptureError(err, nil)
			continue
		}
		// created/modified are nullable columns
		if created.Valid {
			rec.Created = created.Time
		}
		if modified.Valid {
			rec.Modified = modified.Time
		}
		results = append(results, rec)
	}
	// FIX: the iteration error was never checked before, so a
	// connection failure mid-scan looked like a short (or empty) list.
	if err = rows.Err(); err != nil {
		log.Println("GetItemList Rows MySQL", err)
		raven.CaptureError(err, nil)
	}
	return results, nil
}
// buildItemListQuery constructs the SQL text and the matching argument
// list for a paged listing of the items table. sortorder may be one of
// "name", "size", "modified", or "created", optionally prefixed with
// "-" for descending order; any other value leaves the rows unsorted.
func buildItemListQuery(offset int, pagesize int, sortorder string) (string, []interface{}) {
	// The mysql driver does not have positional parameters, so the
	// argument list is assembled in lockstep with the query text.
	var args []interface{}
	var q bytes.Buffer
	q.WriteString("SELECT item, created, modified, size FROM items ")

	descending := strings.HasPrefix(sortorder, "-")
	if descending {
		sortorder = sortorder[1:]
	}
	var column string
	switch sortorder {
	case "name":
		column = "item"
	case "size", "modified", "created":
		column = sortorder
	}
	if column != "" {
		q.WriteString("ORDER BY ")
		q.WriteString(column)
		if descending {
			q.WriteString(" DESC ")
		}
	}
	q.WriteString(" LIMIT ? ")
	args = append(args, pagesize)
	if offset > 0 {
		q.WriteString("OFFSET ? ")
		args = append(args, offset)
	}
	return q.String(), args
}
// IndexItem adds row entries for every version, slot, and blob
// for the given item. It is ok if some pieces are already in the tables.
// Blobs with ids above the current maximum are inserted and the rest
// have their mutable fields updated; versions and slots are only ever
// inserted. All table changes happen inside one transaction.
func (ms *MsqlCache) IndexItem(item string, thisItem *items.Item) error {
	// first update blobs. This isn't perfect. While a blob record doesn't
	// change often, it is possible. The Bundle id, the mime type or the deleted
	// flags could be changed. Not sure how to handle that. It seems inefficient
	// to check the records already in the table. maybe we need a way to track
	// changes to blob records so we can only update those.
	maxblob, err := ms.getMaxBlob(item)
	if err != nil {
		return err
	}
	maxversion, err := ms.getMaxVersion(item)
	if err != nil {
		return err
	}
	tx, err := ms.db.Begin()
	if err != nil {
		return err
	}
	// add/update blobs
	for _, blob := range thisItem.Blobs {
		// the deleted timestamp is a nullable column; only set it when
		// the blob actually carries a deletion date
		var dd mysql.NullTime
		if !blob.DeleteDate.IsZero() {
			dd.Time = blob.DeleteDate
			dd.Valid = true
		}
		if int(blob.ID) > maxblob {
			// new blob: insert the complete record
			const insertblob = `INSERT INTO blobs
				(item, blobid, size, bundle, created, creator, MD5, SHA256,
				mimetype, deleted, deleter, deletenote)
				VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
			_, err = tx.Exec(insertblob, item, blob.ID, blob.Size, blob.Bundle,
				blob.SaveDate, blob.Creator, blob.MD5, blob.SHA256,
				blob.MimeType, dd, blob.Deleter, blob.DeleteNote)
		} else {
			// existing blob: refresh only the fields that may change
			const updateblob = `UPDATE blobs SET
				bundle = ?,
				mimetype = ?,
				deleted = ?,
				deleter = ?,
				deletenote = ?
				WHERE item = ? AND blobid = ?`
			_, err = tx.Exec(updateblob, blob.Bundle, blob.MimeType,
				dd, blob.Deleter, blob.DeleteNote, item, blob.ID)
		}
		if err != nil {
			tx.Rollback()
			return err
		}
	}
	// update the version and slot tables. These should not change once created,
	// so we do not have the update problem as the blobs do
	for _, v := range thisItem.Versions {
		if v.ID <= items.VersionID(maxversion) {
			continue // this version has already been indexed
		}
		const insertver = `INSERT INTO versions
			(item, versionid, created, creator, note)
			VALUES (?, ?, ?, ?, ?)`
		_, err := tx.Exec(insertver, item, v.ID, v.SaveDate, v.Creator, v.Note)
		if err != nil {
			tx.Rollback()
			return err
		}
		for slot, bid := range v.Slots {
			const insertslot = `INSERT INTO slots
				(item, versionid, blobid, name)
				VALUES (?, ?, ?, ?)`
			_, err := tx.Exec(insertslot, item, v.ID, bid, slot)
			if err != nil {
				tx.Rollback()
				return err
			}
		}
	}
	return tx.Commit()
}
// NextFixity returns the id of the earliest fixity record that is
// still in the "scheduled" state and due at or before the cutoff time.
// It returns 0 when there is no such record or on a database error
// (which is logged and reported to sentry).
func (mc *MsqlCache) NextFixity(cutoff time.Time) int64 {
	const query = `
		SELECT id
		FROM fixity
		WHERE status = "scheduled" AND scheduled_time <= ?
		ORDER BY scheduled_time
		LIMIT 1`
	var id int64
	switch err := mc.db.QueryRow(query, cutoff).Scan(&id); {
	case err == sql.ErrNoRows:
		return 0
	case err != nil:
		log.Println("nextfixity", err)
		raven.CaptureError(err, nil)
		return 0
	}
	return id
}
// GetFixity returns the fixity record with the given id, or nil when
// it does not exist or a database error occurs (errors are logged and
// reported to sentry).
func (mc *MsqlCache) GetFixity(id int64) *Fixity {
	const query = `
		SELECT id, item, scheduled_time, status, notes
		FROM fixity
		WHERE id = ?
		LIMIT 1`
	var rec Fixity
	var when mysql.NullTime
	err := mc.db.QueryRow(query, id).Scan(&rec.ID, &rec.Item, &when, &rec.Status, &rec.Notes)
	if err == sql.ErrNoRows {
		return nil
	} else if err != nil {
		// NOTE(review): "GetFixty" looks like a typo for "GetFixity";
		// left unchanged to keep log output grep-stable.
		log.Println("GetFixtyByID MySQL queryrow", err)
		raven.CaptureError(err, nil)
		return nil
	}
	// Handle for null time value
	if when.Valid {
		rec.ScheduledTime = when.Time
	}
	return &rec
}
// SearchFixity returns the fixity records matching the given filters
// (see buildQuery); zero-valued/empty filters are ignored. Scan errors
// on individual rows are logged and the row skipped; query and
// iteration errors are logged and reported to sentry.
func (mc *MsqlCache) SearchFixity(start, end time.Time, item string, status string) []*Fixity {
	query, args := buildQuery(start, end, item, status)
	var results []*Fixity
	rows, err := mc.db.Query(query, args...)
	if err == sql.ErrNoRows {
		// no next record
		return nil
	} else if err != nil {
		log.Println("GetFixity Query MySQL", err)
		raven.CaptureError(err, nil)
		return nil
	}
	defer rows.Close()
	for rows.Next() {
		var rec = new(Fixity)
		var when mysql.NullTime
		err = rows.Scan(&rec.ID, &rec.Item, &when, &rec.Status, &rec.Notes)
		if err != nil {
			log.Println("GetFixity Scan MySQL", err)
			raven.CaptureError(err, nil)
			continue
		}
		// scheduled_time is a nullable column
		if when.Valid {
			rec.ScheduledTime = when.Time
		}
		results = append(results, rec)
	}
	// FIX: the iteration error was never checked before, so a
	// connection failure mid-scan silently truncated the results.
	if err = rows.Err(); err != nil {
		log.Println("GetFixity Rows MySQL", err)
		raven.CaptureError(err, nil)
	}
	return results
}
// construct an return an sql query and parameter list, using the parameters passed
func buildQuery(start, end time.Time, item string, status string) (string, []interface{}) {
var query bytes.Buffer
// The mysql driver does not have positional parameters, so we build the
// parameter list in parallel to the query.
var args []interface{}
query.WriteString("SELECT id, item, scheduled_time, status, notes FROM fixity")
conjunction := " WHERE "
if !start.IsZero() {
query.WriteString(conjunction + "scheduled_time >= ?")
conjunction = " AND "
args = append(args, start)
}
if !end.IsZero() {
query.WriteString(conjunction + "scheduled_time <= ?")
conjunction = " AND "
args = append(args, end)
}
if item != "" {
query.WriteString(conjunction + "item = ?")
conjunction = " AND "
args = append(args, item)
}
if status != "" {
query.WriteString(conjunction + "status = ?")
args = append(args, status)
}
query.WriteString(" ORDER BY scheduled_time")
return query.String(), args
}
// UpdateFixity updates or creates the given fixity record. The record is created if
// ID is == 0. Otherwise the given record is updated so long as
// the record in the database has status "scheduled".
// The ID of the new or updated record is returned.
// An empty Status defaults to "scheduled".
func (mc *MsqlCache) UpdateFixity(record Fixity) (int64, error) {
	if record.Status == "" {
		record.Status = "scheduled"
	}
	if record.ID == 0 {
		// new record
		const stmt = `INSERT INTO fixity (item, scheduled_time, status, notes) VALUES (?,?,?,?)`
		result, err := mc.db.Exec(stmt, record.Item, record.ScheduledTime, record.Status, record.Notes)
		var id int64
		if err == nil {
			id, _ = result.LastInsertId()
		}
		return id, err
	}
	// update existing record; the status guard means records that have
	// already run are never modified
	const stmt = `
		UPDATE fixity
		SET item = ?, status = ?, notes = ?, scheduled_time = ?
		WHERE id = ? and status = "scheduled"
		LIMIT 1`
	_, err := mc.db.Exec(stmt, record.Item, record.Status, record.Notes, record.ScheduledTime, record.ID)
	return record.ID, err
}
// DeleteFixity removes the fixity record with the given id, provided
// it is still in the "scheduled" state; records that have already run
// are intentionally left untouched.
func (mc *MsqlCache) DeleteFixity(id int64) error {
	const stmt = `DELETE FROM fixity WHERE id = ? AND status = "scheduled"`
	_, err := mc.db.Exec(stmt, id)
	return err
}
// LookupCheck will return the time of the earliest scheduled fixity
// check for the given item. If there is no pending fixity check for
// the item, it returns the zero time.
func (mc *MsqlCache) LookupCheck(item string) (time.Time, error) {
	const query = `
		SELECT scheduled_time
		FROM fixity
		WHERE item = ? AND status = "scheduled"
		ORDER BY scheduled_time
		LIMIT 1`
	var when mysql.NullTime
	err := mc.db.QueryRow(query, item).Scan(&when)
	if err == sql.ErrNoRows {
		// having no pending check is not an error
		err = nil
	}
	if when.Valid {
		return when.Time, err
	}
	return time.Time{}, err
}
// database migrations. each one is a go function. Add them to the
// list mysqlMigrations at top of this file for them to be run.

// mysqlschema1 creates the initial items and fixity tables.
func mysqlschema1(tx migration.LimitedTx) error {
	var s = []string{
		`CREATE TABLE IF NOT EXISTS items (
		id varchar(255),
		created datetime,
		modified datetime,
		size int,
		value text)`,
		`CREATE TABLE IF NOT EXISTS fixity (
		id varchar(255),
		scheduled_time datetime,
		status varchar(32),
		notes text)`,
	}
	return execlist(tx, s)
}

// mysqlschema2 renames the original id columns to item and adds
// auto-increment primary keys to both tables.
func mysqlschema2(tx migration.LimitedTx) error {
	var s = []string{
		`ALTER TABLE items CHANGE COLUMN id item varchar(255)`,
		`ALTER TABLE fixity CHANGE COLUMN id item varchar(255)`,
		`ALTER TABLE fixity ADD COLUMN id int PRIMARY KEY AUTO_INCREMENT FIRST`,
		`ALTER TABLE items ADD COLUMN id int PRIMARY KEY AUTO_INCREMENT FIRST`,
	}
	return execlist(tx, s)
}

// mysqlschema3 deletes duplicated item rows, enforces uniqueness of
// the item column, and widens the value and size columns.
func mysqlschema3(tx migration.LimitedTx) error {
	var s = []string{
		`CREATE TEMPORARY TABLE mult_ids AS SELECT item FROM items GROUP BY item HAVING count(*) > 1`,
		`DELETE FROM items WHERE item IN (SELECT * from mult_ids)`,
		`ALTER TABLE items ADD UNIQUE INDEX items_item (item), CHANGE COLUMN value value LONGTEXT, CHANGE COLUMN size size BIGINT`,
	}
	return execlist(tx, s)
}

// mysqlschema4 adds the blobs, versions, and slots index tables used
// by IndexItem and the Find* lookups.
func mysqlschema4(tx migration.LimitedTx) error {
	var s = []string{
		`CREATE TABLE IF NOT EXISTS blobs (
		id int PRIMARY KEY AUTO_INCREMENT,
		item varchar(255),
		blobid int,
		size int,
		bundle int,
		created datetime,
		creator varchar(64),
		MD5 binary(16),
		SHA256 binary(32),
		mimetype varchar(64),
		deleted datetime,
		deleter varchar(64),
		deletenote text,
		INDEX i_item (item),
		INDEX i_itemblob (item, blobid)
	)`,
		`CREATE TABLE IF NOT EXISTS versions (
		id int PRIMARY KEY AUTO_INCREMENT,
		item varchar(255),
		versionid int,
		created datetime,
		creator varchar(64),
		note text,
		INDEX i_item (item),
		INDEX i_itemversion (item, versionid) )`,
		`CREATE TABLE IF NOT EXISTS slots (
		id int PRIMARY KEY AUTO_INCREMENT,
		item varchar(255),
		versionid int,
		blobid int,
		name varchar(1024),
		INDEX i_item (item),
		INDEX i_name (name),
		INDEX i_itemversion (item, versionid) )`,
	}
	return execlist(tx, s)
}
// execlist runs each statement in stms in order, stopping at and
// returning the first error encountered (nil if all succeed). Used to
// work around the mysql driver not handling compound exec statements.
func execlist(tx migration.LimitedTx, stms []string) error {
	for _, stmt := range stms {
		if _, err := tx.Exec(stmt); err != nil {
			return err
		}
	}
	return nil
}
|
work-mohit/Placement-Practice | Love Babber OnGoing Placement/BinarySearch/PeakOfTheMountain.cpp | <reponame>work-mohit/Placement-Practice<gh_stars>0
// Binary search for the peak of a mountain array: shrink the window
// toward the first element that is greater than its successor.
class Solution {
public:
    int peakIndexInMountainArray(vector<int>& arr) {
        int lo = 0;
        int hi = (int)arr.size() - 1;
        while (lo < hi) {
            int mid = lo + (hi - lo) / 2;
            if (arr[mid] < arr[mid + 1]) {
                lo = mid + 1;   // still ascending; the peak lies to the right
            } else {
                hi = mid;       // at or past the peak; keep mid in the window
            }
        }
        // lo == hi is the peak index
        return lo;
    }
};
//////////////////////////////////////////////////////
// Binary search variant that tracks the candidate peak explicitly.
// FIXED: the original read arr[middle + 1] and arr[middle - 1] without
// bounds checks (undefined behavior when middle was the last or first
// index) and could return an uninitialized peak_index.
class Solution {
public:
    int peakIndexInMountainArray(vector<int>& arr) {
        int start = 0;
        int end = (int)arr.size() - 1;
        int peak_index = 0;  // initialized so a degenerate input cannot return garbage
        while (start <= end)
        {
            int middle = start + (end - start) / 2;
            // Comparing with the next element (only when one exists)
            if (middle < (int)arr.size() - 1 && arr[middle] < arr[middle + 1]) {
                start = middle + 1;        // ascending: peak is to the right
            }
            // Comparing with the previous element (only when one exists)
            else if (middle > 0 && arr[middle] < arr[middle - 1]) {
                end = middle - 1;          // descending: peak is to the left
            }
            else {
                peak_index = middle;       // not smaller than either neighbor: the peak
                break;
            }
        }
        return peak_index;
    }
};
intranetmouse/RayTracerChallenge | graphics.raytrace/src/test/java/org/intranet/graphics/raytrace/steps/CucumberTests.java | package org.intranet.graphics.raytrace.steps;
import org.junit.runner.RunWith;
import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import io.cucumber.junit.CucumberOptions.SnippetType;
/**
 * JUnit entry point that runs every Cucumber feature file found on the
 * classpath under /features, binding steps to the glue code in this
 * package. The class body is intentionally empty: the Cucumber runner
 * configured by the annotations below does all the work.
 */
@RunWith(value = Cucumber.class)
@CucumberOptions(monochrome = true, plugin = {"pretty", "summary"},
    strict = true,
    glue = "classpath:/org/intranet/graphics/raytrace/steps",
    features = "classpath:/features", snippets = SnippetType.CAMELCASE)
public class CucumberTests
{
}
|
aspireias-ins/CodebaseAppIOS | node_modules/appium/node_modules/appium-espresso-driver/espresso-server/app/src/androidTest/java/io/appium/espressoserver/lib/viewaction/RootViewFinder.java | package io.appium.espressoserver.lib.viewaction;
import android.support.test.espresso.UiController;
import android.support.test.espresso.ViewAction;
import android.view.View;
import org.hamcrest.Matcher;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.matcher.ViewMatchers.isRoot;
/**
 * Get the Root View of the Android App.
 * Hack solution that makes use of Espresso ViewActions: rather than
 * returning the view directly, the action stores it in a holder array
 * that the caller then reads.
 */
public class RootViewFinder {
    // single-element holder written by GetViewAction.perform()
    private final View[] views = {null};

    /**
     * To get the root view we implement a custom ViewAction that simply takes the View
     * and then saves it to an array in its parent class.
     */
    private class GetViewAction implements ViewAction {
        // the action only ever matches the root view
        @Override
        public Matcher<View> getConstraints() {
            return isRoot();
        }

        @Override
        public String getDescription() {
            return "getting root view of application";
        }

        // stash the matched view in the enclosing holder array
        @Override
        public void perform(UiController uiController, View view) {
            views[0] = view;
        }
    }

    /**
     * This function calls the above view action which saves the view to 'views' array
     * and then returns it.
     * @return The root view, or null if the action never ran.
     */
    public View getRootView() {
        onView(isRoot()).perform(new GetViewAction());
        return views[0];
    }
}
|
weikano/TAKotlin | login/src/main/java/com/kingsunsoft/sdk/login/net/request/LoginReq.java | <gh_stars>0
package com.kingsunsoft.sdk.login.net.request;
import android.util.Log;
import com.qq.tars.protocol.tars.TarsOutputStream;
import com.kingsunsoft.sdk.login.exception.ResponseValidateError;
import com.kingsunsoft.sdk.mod.Header;
import com.kingsunsoft.sdk.mod.Response;
import com.kingsunsoft.sdk.modsdk.LoginRequest;
import com.kingsunsoft.sdk.modsdk.LoginResponse;
import com.kingsunsoft.sdk.login.module.Mapper;
import com.kingsunsoft.sdk.login.module.User;
import com.kingsunsoft.sdk.login.net.Api;
import com.kingsunsoft.sdk.login.net.request.base.ModRequest;
import com.kingsunsoft.sdk.login.net.utils.TarsUtils;
import io.reactivex.Maybe;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.annotations.NonNull;
import io.reactivex.functions.Function;
/**
 * Tars-encoded login request: serializes the credentials passed to the
 * constructor into a LoginRequest body, sends it, validates and decodes
 * the LoginResponse, and maps it to a persisted User entity delivered
 * on the Android main thread.
 *
 * Created by hu.yang on 2017/5/8.
 */
public class LoginReq extends ModRequest<User> {
    /**
     * @param params positional arguments consumed by makeReqBinary:
     * [0] username, [1] password, [2] verify code, [3] wx app id,
     * [4] wx login code — all Strings.
     */
    public LoginReq(Object... params) {
        super(LoginResponse.class, params);
    }

    // A login carries no session yet, so the header's token, account
    // and refreshToken fields are blanked out.
    public Header makeHeader() {
        Header header = super.makeHeader();
        header.token = "";
        header.account = "";
        header.refreshToken = "";
        return header;
    }

    public String getModRequestName() {
        return LoginRequest.class.getSimpleName();
    }

    // Serializes the five login parameters into a Tars byte stream.
    public byte[] makeReqBinary(Object[] params) {
        LoginRequest loginReq = new LoginRequest();
        TarsOutputStream outputStream = new TarsOutputStream();
        loginReq.setUsername((String) params[0]);
        loginReq.setPassword((String) params[1]);
        loginReq.setVerifyCode((String) params[2]);
        loginReq.setWxAppId((String) params[3]);
        loginReq.setWxLoginCode((String) params[4]);
        loginReq.writeTo(outputStream);
        return outputStream.toByteArray();
    }

    // Validation is done inline in sendRequest (userId check below),
    // so this hook always accepts.
    protected boolean validateRsp(User rsp) {
        return true;
    }

    public Maybe<User> sendRequest() {
        createHeaderAndBody();
        return call().map(new Function<Response, LoginResponse>() {
            @Override
            public LoginResponse apply(@NonNull Response response) throws Exception {
                // decode the Tars body; a response without a non-zero
                // userId is treated as a failed login
                LoginResponse result = (LoginResponse) TarsUtils.getBodyRsp(LoginResponse.class, response.body);
                if (result.userInfo != null && result.userInfo.userId != 0){
                    return result;
                }
                throw new ResponseValidateError();
            }
        }).map(new Function<LoginResponse, User>() {
            @Override
            public User apply(@NonNull LoginResponse response) throws Exception {
                // map to the local entity and persist it; the log
                // message means "failed to persist user data"
                User userEntity = Mapper.mapperUserEntity(response.userInfo, getHeader());
                if (!userEntity.save())
                    Log.e("KingSunSDK", "持久化用户数据失败!");
                return userEntity;
            }
        }).observeOn(AndroidSchedulers.mainThread());
    }

    @Override
    protected Maybe<Response> getRequestSingle(Api api) {
        return api.loginReq(this);
    }
}
|
ArriolaHarold2001/addedlamps | build/tmp/expandedArchives/forge-1.17.1-37.0.58_mapped_official_1.17.1-sources.jar_461b1baaba5fdaecf94c73039d52c00b/net/minecraft/commands/arguments/item/ItemArgument.java | package net.minecraft.commands.arguments.item;
import com.mojang.brigadier.StringReader;
import com.mojang.brigadier.arguments.ArgumentType;
import com.mojang.brigadier.context.CommandContext;
import com.mojang.brigadier.exceptions.CommandSyntaxException;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import net.minecraft.tags.ItemTags;
/**
 * Brigadier argument type that parses an item specification (e.g.
 * "minecraft:stick{foo=bar}") into an ItemInput, and supplies tab
 * completion suggestions for partially typed input.
 */
public class ItemArgument implements ArgumentType<ItemInput> {
   // sample inputs shown to users of this argument type
   private static final Collection<String> EXAMPLES = Arrays.asList("stick", "minecraft:stick", "stick{foo=bar}");

   /** Factory used when registering this argument type with a command. */
   public static ItemArgument item() {
      return new ItemArgument();
   }

   public ItemInput parse(StringReader p_120962_) throws CommandSyntaxException {
      ItemParser itemparser = (new ItemParser(p_120962_, false)).parse();
      return new ItemInput(itemparser.getItem(), itemparser.getNbt());
   }

   /** Retrieves a previously parsed ItemInput from the command context. */
   public static <S> ItemInput getItem(CommandContext<S> p_120964_, String p_120965_) {
      return p_120964_.getArgument(p_120965_, ItemInput.class);
   }

   public <S> CompletableFuture<Suggestions> listSuggestions(CommandContext<S> p_120968_, SuggestionsBuilder p_120969_) {
      StringReader stringreader = new StringReader(p_120969_.getInput());
      stringreader.setCursor(p_120969_.getStart());
      ItemParser itemparser = new ItemParser(stringreader, false);

      try {
         itemparser.parse();
      } catch (CommandSyntaxException commandsyntaxexception) {
         // deliberately ignored: even a failed partial parse leaves the
         // parser able to suggest completions for what was typed so far
      }

      return itemparser.fillSuggestions(p_120969_, ItemTags.getAllTags());
   }

   public Collection<String> getExamples() {
      return EXAMPLES;
   }
}
onap/appc | appc-directed-graph/appc-dgraph/provider/src/test/java/org/onap/appc/dg/TestGetConfigParams.java | /*-
* ============LICENSE_START=======================================================
* ONAP : APPC
* ================================================================================
* Copyright (C) 2017-2018 AT&T Intellectual Property. All rights reserved.
* ================================================================================
* Copyright (C) 2017 Amdocs
* =============================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ============LICENSE_END=========================================================
*/
package org.onap.appc.dg;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import org.onap.appc.dg.mock.instance.MockConfigureNodeExecutor;
import org.onap.appc.dg.mock.instance.MockSvcLogicJavaPlugin;
/* move to open source
import org.onap.sdnc.dgtestlibrary.AbstractDGTestCase;
import org.onap.sdnc.dgtestlibrary.DGTestCase;
import org.onap.sdnc.dgtestlibrary.GraphKey;
*/
import org.onap.ccsdk.sli.core.sli.SvcLogicContext;
/**
 * DG test cases for the APPC "GetConfigParams" directed graph.
 *
 * NOTE(review): the entire test body below is commented out because it depends
 * on org.onap.sdnc.dgtestlibrary (AbstractDGTestCase, DGTestCase, GraphKey),
 * which has not yet been moved to open source -- see the commented imports
 * above. Re-enable the tests and the AbstractDGTestCase superclass once that
 * library is available. Each test loads APPC_GetConfigParams.xml, registers a
 * ConvertNode service reference (except the Json2DGContextFail case) and runs
 * the graph against a scenario-specific .properties file.
 */
public class TestGetConfigParams /* extends AbstractDGTestCase */ {
/*
    public static String getConfigParamsXML = "src/main/resources/xml/APPC_GetConfigParams.xml";

    @Test
    public void testGetConfigParamsWithDefaultTemplate() {
        try {
            String propertyfileName = "APPC/GetConfigParams/DefaultTemplate.properties";
            Map<String, Object> serviceReferences = new HashMap<String, Object>();
            serviceReferences.put("org.onap.sdnc.config.generator.convert.ConvertNode", new org.onap.sdnc.config.generator.convert.ConvertNode());
            // Register Call graphs
            String injectGraphXmls[] = new String[] { getConfigParamsXML };
            GraphKey graphKey = new GraphKey("APPC", null, "GetConfigParams", null);
            DGTestCase tc = new DGTestCase(graphKey);
            tc.setInjectGraphXmls(injectGraphXmls);
            tc.setServiceReferences(serviceReferences);
            tc.setPropertyfileName(propertyfileName);
            SvcLogicContext ctx = new SvcLogicContext();
            processTestCase(tc, ctx);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testGetConfigParamsWithTemplateName() {
        try {
            String propertyfileName = "APPC/GetConfigParams/TemplateName.properties";
            Map<String, Object> serviceReferences = new HashMap<String, Object>();
            serviceReferences.put("org.onap.sdnc.config.generator.convert.ConvertNode", new org.onap.sdnc.config.generator.convert.ConvertNode());
            // Register Call graphs
            String injectGraphXmls[] = new String[] { getConfigParamsXML };
            GraphKey graphKey = new GraphKey("APPC", null, "GetConfigParams", null);
            DGTestCase tc = new DGTestCase(graphKey);
            tc.setInjectGraphXmls(injectGraphXmls);
            tc.setServiceReferences(serviceReferences);
            tc.setPropertyfileName(propertyfileName);
            SvcLogicContext ctx = new SvcLogicContext();
            processTestCase(tc, ctx);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testGetConfigParamsWithCliString() {
        try {
            String propertyfileName = "APPC/GetConfigParams/CliString.properties";
            Map<String, Object> serviceReferences = new HashMap<String, Object>();
            serviceReferences.put("org.onap.sdnc.config.generator.convert.ConvertNode", new org.onap.sdnc.config.generator.convert.ConvertNode());
            // Register Call graphs
            String injectGraphXmls[] = new String[] { getConfigParamsXML };
            GraphKey graphKey = new GraphKey("APPC", null, "GetConfigParams", null);
            DGTestCase tc = new DGTestCase(graphKey);
            tc.setInjectGraphXmls(injectGraphXmls);
            tc.setServiceReferences(serviceReferences);
            tc.setPropertyfileName(propertyfileName);
            SvcLogicContext ctx = new SvcLogicContext();
            processTestCase(tc, ctx);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testGetConfigParamsWithCliJson() {
        try {
            String propertyfileName = "APPC/GetConfigParams/CliJson.properties";
            Map<String, Object> serviceReferences = new HashMap<String, Object>();
            serviceReferences.put("org.onap.sdnc.config.generator.convert.ConvertNode", new org.onap.sdnc.config.generator.convert.ConvertNode());
            // Register Call graphs
            String injectGraphXmls[] = new String[] { getConfigParamsXML };
            GraphKey graphKey = new GraphKey("APPC", null, "GetConfigParams", null);
            DGTestCase tc = new DGTestCase(graphKey);
            tc.setInjectGraphXmls(injectGraphXmls);
            tc.setServiceReferences(serviceReferences);
            tc.setPropertyfileName(propertyfileName);
            SvcLogicContext ctx = new SvcLogicContext();
            processTestCase(tc, ctx);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testGetConfigParamsForRestore() {
        try {
            String propertyfileName = "APPC/GetConfigParams/Restore.properties";
            Map<String, Object> serviceReferences = new HashMap<String, Object>();
            serviceReferences.put("org.onap.sdnc.config.generator.convert.ConvertNode", new org.onap.sdnc.config.generator.convert.ConvertNode());
            // Register Call graphs
            String injectGraphXmls[] = new String[] { getConfigParamsXML };
            GraphKey graphKey = new GraphKey("APPC", null, "GetConfigParams", null);
            DGTestCase tc = new DGTestCase(graphKey);
            tc.setInjectGraphXmls(injectGraphXmls);
            tc.setServiceReferences(serviceReferences);
            tc.setPropertyfileName(propertyfileName);
            SvcLogicContext ctx = new SvcLogicContext();
            processTestCase(tc, ctx);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testGetConfigParamsCommonConfigFail() {
        try {
            String propertyfileName = "APPC/GetConfigParams/CommonConfigFail.properties";
            Map<String, Object> serviceReferences = new HashMap<String, Object>();
            serviceReferences.put("org.onap.sdnc.config.generator.convert.ConvertNode", new org.onap.sdnc.config.generator.convert.ConvertNode());
            // Register Call graphs
            String injectGraphXmls[] = new String[] { getConfigParamsXML };
            GraphKey graphKey = new GraphKey("APPC", null, "GetConfigParams", null);
            DGTestCase tc = new DGTestCase(graphKey);
            tc.setInjectGraphXmls(injectGraphXmls);
            tc.setServiceReferences(serviceReferences);
            tc.setPropertyfileName(propertyfileName);
            SvcLogicContext ctx = new SvcLogicContext();
            processTestCase(tc, ctx);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testGetConfigParamsJson2DGContextFail() {
        try {
            String propertyfileName = "APPC/GetConfigParams/Json2DGContextFail.properties";
            Map<String, Object> serviceReferences = new HashMap<String, Object>();
            // Register Call graphs
            String injectGraphXmls[] = new String[] { getConfigParamsXML };
            GraphKey graphKey = new GraphKey("APPC", null, "GetConfigParams", null);
            DGTestCase tc = new DGTestCase(graphKey);
            tc.setInjectGraphXmls(injectGraphXmls);
            tc.setServiceReferences(serviceReferences);
            tc.setPropertyfileName(propertyfileName);
            SvcLogicContext ctx = new SvcLogicContext();
            processTestCase(tc, ctx);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
*/
}
|
OpenSextant/opensextant | OpenSextantToolbox/src/org/mitre/opensextant/processing/ResultsUtility.java | /**
* Copyright 2009-2013 The MITRE Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*
* **************************************************************************
* NOTICE This software was produced for the U. S. Government under Contract No.
* W15P7T-12-C-F600, and is subject to the Rights in Noncommercial Computer
* Software and Noncommercial Computer Software Documentation Clause
* 252.227-7014 (JUN 1995)
*
* (c) 2012 The MITRE Corporation. All Rights Reserved.
* **************************************************************************
*
*/
package org.mitre.opensextant.processing;
import java.text.DecimalFormat;
import org.mitre.opensextant.util.TextUtils;
import org.mitre.opensextant.extraction.TextEntity;
import java.util.HashSet;
import java.util.Set;
import java.util.Map;
import java.util.HashMap;
/**
 * Static utility routines shared by results formatters: GATE annotation-type
 * predicates, match-context extraction, confidence formatting and
 * feature-precision lookup.
 */
public class ResultsUtility {

    // -------------
    public final static String PLACE_ANNOTATION = "PLACE";
    public final static String PLACE_CANDIDATE_ANNOTATION = "placeCandidate";
    public final static String GEOCOORD_ANNOTATION = "geocoord";
    public final static Set<String> GATE_GEOCODE_ANNOTATIONS = new HashSet<String>();

    static {
        // This annot set matches "isLocation(annotType)"
        GATE_GEOCODE_ANNOTATIONS.add(GEOCOORD_ANNOTATION);
        GATE_GEOCODE_ANNOTATIONS.add(PLACE_ANNOTATION);
    }

    /**
     * The default TEXT WIDTH. ~75 chars per line yields 2 lines of text.
     */
    public static int TEXT_WIDTH = 150;

    /**
     * Sets the pre/post context windows on the TextEntity, if not already set.
     *
     * @param content    full document text
     * @param t          entity to receive the context
     * @param offset     character offset of the match within content
     * @param match_size length of the matched span
     * @param doc_size   total document length
     */
    public static void setPrePostContextFor(String content, TextEntity t, int offset, int match_size, int doc_size) {
        if (t.getContextAfter() != null) {
            return; // context already present; do not overwrite
        }

        int[] bounds = TextUtils.get_text_window(offset, match_size, doc_size, TEXT_WIDTH);
        t.setContext(
                content.substring(bounds[0], bounds[1]), // text before match
                content.substring(bounds[2], bounds[3])); // text after match
    }

    /**
     * Sets a single whitespace-squeezed context window around the match, if
     * not already set.
     *
     * @param match_size length of the matched span; not passed to the 3-arg
     *                   text-window overload below -- kept for signature
     *                   compatibility with setPrePostContextFor().
     */
    public static void setContextFor(String content,
            TextEntity t, int offset, int match_size, int doc_size) {

        if (t.getContext() != null) {
            return;
        }

        int[] bounds = TextUtils.get_text_window(offset, doc_size, TEXT_WIDTH);
        t.setContext(TextUtils.squeeze_whitespace(content.substring(bounds[0], bounds[1]))); // text around match
    }

    /**
     * Is this a Location annotation type?
     */
    public static boolean isLocation(String a) {
        return GEOCOORD_ANNOTATION.equals(a) || PLACE_ANNOTATION.equals(a);
    }

    /**
     * Is this a Location geocoordinate annotation type?
     */
    public static boolean isCoordinate(String a) {
        return GEOCOORD_ANNOTATION.equals(a);
    }

    /**
     * Is this a Location placename annotation type?
     */
    public static boolean isPlaceName(String a) {
        return PLACE_ANNOTATION.equals(a);
    }

    /**
     * Control floating point accuracy on any results.
     * DecimalFormat is not thread-safe, so formatting is synchronized.
     */
    final static DecimalFormat confFmt = new DecimalFormat("0.000");

    /**
     * @return A string representation of a double with a fixed number of digits
     * to the right of the decimal point.
     */
    public static synchronized String formatConfidence(double d) {
        return confFmt.format(d);
    }

    /**
     * Precision -- approximate error bars (in meters) for geocoding results,
     * keyed by Geonames feature class ("P") or class/code ("P/PPL").
     *
     * TODO: move this to a configuration file
     *
     * E.g. A/ADM1 is generally +/- 50km world-wide; P/PPL is +/- 5km of the
     * center point; P/PPLC (major capital) is +/- 10km, etc. The static
     * initializer below holds the authoritative values.
     */
    public final static Map<String, Integer> FEATURE_PRECISION = new HashMap<String, Integer>();
    public final static Map<String, Integer> FEATURE_GEOHASH_PRECISION = new HashMap<String, Integer>();
    public final static int DEFAULT_PRECISION = 50000; // +/- 50KM
    public final static int DEFAULT_GEOHASH_PRECISION = 5;

    static {
        FEATURE_PRECISION.put("P", 5000);
        FEATURE_PRECISION.put("A", DEFAULT_PRECISION);
        FEATURE_PRECISION.put("S", 1000);
        FEATURE_PRECISION.put("A/ADM1", DEFAULT_PRECISION);
        FEATURE_PRECISION.put("A/ADM2", 20000);
        FEATURE_PRECISION.put("P/PPL", 5000);
        FEATURE_PRECISION.put("P/PPLC", 10000);

        // This helps gauge how long a geohash should be for a given feature.
        FEATURE_GEOHASH_PRECISION.put("A/PCLI", 3);
        FEATURE_GEOHASH_PRECISION.put("CTRY", 3);
        FEATURE_GEOHASH_PRECISION.put("P", 6);
        FEATURE_GEOHASH_PRECISION.put("A", 4);
        FEATURE_GEOHASH_PRECISION.put("S", 8);
        FEATURE_GEOHASH_PRECISION.put("A/ADM2", 5);
    }

    /**
     * Shared lookup used by both precision tables: try "type/code" first,
     * then the bare feature type, else the supplied default.
     */
    private static int lookupPrecision(Map<String, Integer> table,
            String feat_type, String feat_code, int defaultValue) {
        if (feat_type == null && feat_code == null) {
            // Unknown, uncategorized feature
            return defaultValue;
        }
        String lookup = (feat_code != null
                ? feat_type + "/" + feat_code : feat_type);
        Integer prec = table.get(lookup);
        if (prec == null) {
            prec = table.get(feat_type);
        }
        return (prec != null) ? prec.intValue() : defaultValue;
    }

    /**
     * For a given feature type and code, determine what sort of resolution or
     * precision should be considered for that place, approximately.
     *
     * @return approx error in meters for the feature; DEFAULT_PRECISION when
     * the feature is unknown or not in the table.
     */
    public static int getFeaturePrecision(String feat_type, String feat_code) {
        return lookupPrecision(FEATURE_PRECISION, feat_type, feat_code, DEFAULT_PRECISION);
    }

    /** For a given Geonames feature class/designation provide a guess about how long
     * a geohash should be.
     */
    public static int getGeohashPrecision(String feat_type, String feat_code) {
        return lookupPrecision(FEATURE_GEOHASH_PRECISION, feat_type, feat_code, DEFAULT_GEOHASH_PRECISION);
    }
}
|
LaudateCorpus1/math | stan/math/opencl/kernels/ordered_logistic_lpmf.hpp | <reponame>LaudateCorpus1/math
#ifndef STAN_MATH_OPENCL_KERNELS_ORDERED_LOGISTIC_LPMF_HPP
#define STAN_MATH_OPENCL_KERNELS_ORDERED_LOGISTIC_LPMF_HPP
#ifdef STAN_OPENCL
#include <stan/math/opencl/kernel_cl.hpp>
#include <stan/math/opencl/kernels/device_functions/log1m_exp.hpp>
#include <stan/math/opencl/kernels/device_functions/log1p_exp.hpp>
namespace stan {
namespace math {
namespace opencl_kernels {
// \cond
static const char* ordered_logistic_kernel_code = STRINGIFY(
    // \endcond
    /** \ingroup opencl_kernels
     * GPU implementation of ordinal regression.
     *
     * Must be run with at least N_instances threads and local size equal to
     * LOCAL_SIZE_.
     * @param[out] logp_global partially summed log probability (1 value per
     * work group)
     * @param[out] lambda_derivative derivative wrt lambda
     * @param[out] cuts_derivative partially summed derivative wrt cuts (1
     * column per work group)
     * @param[in] y_global a scalar or vector of classes.
     * @param[in] lambda_global vector of continuous lambda variables
     * @param[in] cuts cutpoints vector
     * @param N_instances number of cases
     * @param N_classes number of classes
     * @param is_y_vector 0 or 1 - whether y is a vector (alternatively it is a
     * scalar we need to broadcast)
     * @param is_cuts_matrix 0 or 1 - whether cuts is a matrix (alternatively it
     * is a vector we need to broadcast)
     * @param need_lambda_derivative 0 or 1 - whether lambda_derivative needs to
     * be computed
     * @param need_cuts_derivative 0 or 1 - whether cuts_derivative needs to be
     * computed
     */
    __kernel void ordered_logistic(
        __global double* logp_global, __global double* lambda_derivative,
        __global double* cuts_derivative, const __global int* y_global,
        const __global double* lambda_global, const __global double* cuts,
        const int N_instances, const int N_classes, const int is_y_vector,
        const int is_cuts_matrix, const int need_lambda_derivative,
        const int need_cuts_derivative) {
      const int gid = get_global_id(0);
      const int lid = get_local_id(0);
      const int lsize = get_local_size(0);
      const int wg_id = get_group_id(0);
      const int ngroups = get_num_groups(0);

      __local double local_storage[LOCAL_SIZE_];

      double logp = 0;
      double d1 = 0;
      double d2 = 0;
      int y;
      // When cuts is a matrix, each instance has its own (N_classes - 1)-long
      // column of cutpoints; otherwise cuts_start is 0 for every thread.
      int cuts_start = (N_classes - 1) * gid * is_cuts_matrix;
      // Most calculations only happen for relevant data within next if.
      // Exceptions are reductions between threads that need barriers.
      if (gid < N_instances) {
        double lambda = lambda_global[gid];
        y = y_global[gid * is_y_vector];
        // Out-of-range class or non-finite lambda poisons the log prob.
        if (y < 1 || y > N_classes || !isfinite(lambda)) {
          logp = NAN;
        } else {
          // Cutpoints bracketing class y; +/-INFINITY at the boundary classes.
          const double cut_y1
              = y == N_classes ? INFINITY : cuts[cuts_start + y - 1];
          const double cut_y2 = y == 1 ? -INFINITY : cuts[cuts_start + y - 2];
          const double cut1 = lambda - cut_y1;
          const double cut2 = lambda - cut_y2;
          if (y != N_classes) {
            logp -= log1p_exp(cut1);
          }
          if (y != 1) {
            logp -= log1p_exp(-cut2);
          }
          if (y != 1 && y != N_classes) {
            logp += log1m_exp(cut1 - cut2);
          }
          if (need_lambda_derivative || need_cuts_derivative) {
            double exp_cuts_diff = exp(cut_y2 - cut_y1);
            // Branch on the sign of cut2 (and cut1 below) so that exp() is
            // always evaluated at a non-positive argument - avoids overflow.
            if (cut2 > 0) {
              double exp_m_cut2 = exp(-cut2);
              d1 = exp_m_cut2 / (1 + exp_m_cut2);
            } else {
              d1 = 1 / (1 + exp(cut2));
            }
            d1 -= exp_cuts_diff / (exp_cuts_diff - 1);
            d2 = 1 / (1 - exp_cuts_diff);
            if (cut1 > 0) {
              double exp_m_cut1 = exp(-cut1);
              d2 -= exp_m_cut1 / (1 + exp_m_cut1);
            } else {
              d2 -= 1 / (1 + exp(cut1));
            }
            if (need_lambda_derivative) {
              lambda_derivative[gid] = d1 - d2;
            }
          }
        }
      }
      if (need_cuts_derivative) {
        if (is_cuts_matrix) {
          // Per-instance cutpoints: each thread owns its column, so it can
          // write directly with no cross-thread reduction.
          if (gid < N_instances) {
            for (int i = 0; i < N_classes - 1; i++) {
              if (y - 1 == i) {
                cuts_derivative[cuts_start + i] = d2;
              } else if (y - 2 == i) {
                cuts_derivative[cuts_start + i] = -d1;
              } else {
                cuts_derivative[cuts_start + i] = 0.0;
              }
            }
          }
        } else {
          for (int i = 0; i < N_classes - 1; i++) {
            local_storage[lid] = 0;
            if (gid < N_instances) {
              if (y - 1 == i) {
                local_storage[lid] = d2;
              } else if (y - 2 == i) {
                local_storage[lid] = -d1;
              }
            }
            // Sum cuts_derivative, calculated by different threads.
            // Since we can't sum between different work groups, we emit one
            // number per work group. These must be summed on CPU for final
            // result.
            barrier(CLK_LOCAL_MEM_FENCE);
            for (int step = lsize / REDUCTION_STEP_SIZE; step > 0;
                 step /= REDUCTION_STEP_SIZE) {
              if (lid < step) {
                // NOTE: this inner i intentionally shadows the outer class
                // index i; the write below the loop uses the outer one.
                for (int i = 1; i < REDUCTION_STEP_SIZE; i++) {
                  local_storage[lid] += local_storage[lid + step * i];
                }
              }
              barrier(CLK_LOCAL_MEM_FENCE);
            }
            if (lid == 0) {
              cuts_derivative[(N_classes - 1) * wg_id + i] = local_storage[0];
            }
            barrier(CLK_LOCAL_MEM_FENCE);
          }
        }
      }
      // Tree-reduce logp across the work group; one partial sum per group,
      // to be summed on the host.
      local_storage[lid] = logp;
      barrier(CLK_LOCAL_MEM_FENCE);
      for (int step = lsize / REDUCTION_STEP_SIZE; step > 0;
           step /= REDUCTION_STEP_SIZE) {
        if (lid < step) {
          for (int i = 1; i < REDUCTION_STEP_SIZE; i++) {
            local_storage[lid] += local_storage[lid + step * i];
          }
        }
        barrier(CLK_LOCAL_MEM_FENCE);
      }
      if (lid == 0) {
        logp_global[wg_id] = local_storage[0];
      }
    }
    // \cond
);
// \endcond

/** \ingroup opencl_kernels
 * See the docs for \link kernels/ordered_logistic_lpmf.hpp
 * ordered_logistic() \endlink
 */
const kernel_cl<out_buffer, out_buffer, out_buffer, in_buffer, in_buffer,
                in_buffer, int, int, int, int, int, int>
    ordered_logistic("ordered_logistic",
                     {log1p_exp_device_function, log1m_exp_device_function,
                      ordered_logistic_kernel_code},
                     {{"REDUCTION_STEP_SIZE", 4}, {"LOCAL_SIZE_", 64}});
} // namespace opencl_kernels
} // namespace math
} // namespace stan
#endif
#endif
|
OGJG-ZAMONG/ZAMONG-Backend-V1 | src/main/java/app/jg/og/zamong/entity/dream/selldream/chatting/chat/SellDreamChatting.java | <reponame>OGJG-ZAMONG/ZAMONG-Backend-V1
package app.jg.og.zamong.entity.dream.selldream.chatting.chat;
import app.jg.og.zamong.entity.dream.selldream.chatting.room.SellDreamChattingRoom;
import app.jg.og.zamong.entity.user.User;
import lombok.*;
import org.hibernate.annotations.GenericGenerator;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import javax.persistence.*;
import java.time.LocalDateTime;
import java.util.UUID;
/**
 * A single chat message exchanged in a dream-sale chatting room.
 */
@Getter
@Builder
@AllArgsConstructor
@NoArgsConstructor(access = AccessLevel.PROTECTED)
@EntityListeners(AuditingEntityListener.class) // enables @CreatedDate auditing below
@Entity
public class SellDreamChatting {

    // UUID primary key generated by Hibernate's "uuid2" strategy,
    // stored compactly as BINARY(16).
    @Id
    @GeneratedValue(generator = "uuid2")
    @GenericGenerator(name = "uuid2", strategy = "uuid2")
    @Column(columnDefinition = "BINARY(16)")
    private UUID uuid;

    // Message body text.
    private String chat;

    // Set automatically by Spring Data JPA auditing on first persist.
    @CreatedDate
    @Column(name = "created_at")
    private LocalDateTime createdAt;

    // Author of the message.
    @ManyToOne
    @JoinColumn(name = "user_uuid", nullable = false)
    private User user;

    // Room the message belongs to; fetched lazily to avoid loading the
    // whole room per message.
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "room_uuid", nullable = false)
    private SellDreamChattingRoom room;
}
|
ahmedtaiye/tfeatslekan | cosineeuclidean.py | from __future__ import print_function
import sklearn
import mpl_toolkits
import os # for os.path.basename
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.metrics.pairwise import cosine_similarity
import matplotlib.pyplot as plt
from sklearn.manifold import MDS
from scipy.cluster.hierarchy import ward, dendrogram
sklearn.__version__
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
filenames = ['ken-lay_body.txt',
'jeff-skilling_body.txt',
'Richard-shapiro_body.txt',
'kay-mann_body.txt',
'Jeff-dasovich_body.txt',
'tana jones_body.txt',
'<NAME>_body.txt',
'shackleton sara_body.txt',
'<NAME>_body.txt',
'Mark taylor_body.txt',
'davis pete_body.txt',
'Chris g_body.txt',
'kate symes_body.txt']
#Calculating the cardinality and dissimilarities.
Cardinality=0
for files in filenames:
if files.endswith('.txt'):
Cardinality+=1
vectorizer = CountVectorizer(input='filename')
dtm = vectorizer.fit_transform(filenames) # a sparse matrix
vocab = vectorizer.get_feature_names() # a list
#type(dtm)
dtm = dtm.toarray() # convert to a regular array
#print (dtm.shape)
N, K = dtm.shape
ind = np.arange(N) # points on the x-axis
width = 0.2
vocab = np.array(vocab)
n, _ = dtm.shape
dist = np.zeros((n, n))
#dissimilarity
Dissimilarity=dist
for i in range(n):
for j in range(n):
x, y = dtm[i, :], dtm[j, :]
dist[i, j] = np.sqrt(np.sum((x - y)**2))
#A sparse matrix only records non-zero entries and is used to store matrices that contain a significant number of entries
# that are zero.To understand why this matters enough that CountVectorizer returns a sparse matrix by default,
# consider a 4000 by 50000 matrix of word frequencies that is 60% zeros. In Python an integer takes up four bytes,
# so using a sparse matrix saves almost 500M of memory, which is a considerable amount of computer memory in the 2010s.
# (Recall that Python objects such as arrays are stored in memory, not on disk). If you are working with a very
# large collection of texts, you may encounter memory errors after issuing the commands above. Provided your corpus is not truly
# massive, it may be advisable to locate a machine with a greater amount of memory. For example, these days
# it is possible to rent a machine with 64G of memory by the hour. Conducting experiments on a random subsample
# (small enough to fit into memory)
#is also recommended.
plt.bar(ind, dtm[:,0], width=width)
plt.xticks(ind + width, filenames) # put labels in the center
plt.title('Share of Topic #0')
dist = euclidean_distances(dtm)
print (np.round(dist, 1))
#Keep in mind that cosine similarity is a measure of similarity (rather than distance) that ranges between 0 and 1 (as it is the cosine of the angle between the two vectors).
# In order to get a measure of distance (or dissimilarity), we need to “flip” the measure so that a larger angle receives a larger value. The distance measure
# derived from cosine similarity is therefore one minus the cosine similarity between two vectors.
dist = 1 - cosine_similarity(dtm)
print (np.round(dist, 2))
norms = np.sqrt(np.sum(dtm * dtm, axis=1, keepdims=True)) # multiplication between arrays is element-wise
dtm_normed = dtm / norms
similarities = np.dot(dtm_normed, dtm_normed.T)
print ("\n COSINE DISSIMILARITY of .txt files in: ")
print (np.round(similarities, 2))
from scipy.spatial import KDTree
r=(np.round(dist, 1)).T
print ("\n EUCLIDEAN DISSIMILARITY of .txt files in: ")
print (np.round(similarities, 1))
mds = MDS(n_components=2, dissimilarity="precomputed", random_state=1)
pos = mds.fit_transform(dist) # shape (n_components, n_samples)
xs, ys = pos[:, 0], pos[:, 1]
# short versions of filenames:
# convert 'data/austen-brontë/Austen_Emma.txt' to 'Austen_Emma'
names = [os.path.basename(fn).replace('.txt', '') for fn in filenames]
# color-blind-friendly palette
for x, y, name in zip(xs, ys, names):
color = 'red' if "ken-lay_body" in name else 'skyblue'
plt.scatter(x, y, c=color)
plt.text(x, y, name)
plt.show()
mds = MDS(n_components=3, dissimilarity="precomputed", random_state=1)
pos = mds.fit_transform(dist)
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(pos[:, 0], pos[:, 1], pos[:, 2])
for x, y, z, s in zip(pos[:, 0], pos[:, 1], pos[:, 2], names):
ax.text(x, y, z, s)
plt.show()
linkage_matrix = ward(dist)
# match dendrogram to that returned by R's hclust()
dendrogram(linkage_matrix, orientation="right", labels=names)
plt.tight_layout() # fixes margins
plt.show()
|
highfestiva/life | tbc/include/geometryreference.h |
// Author: <NAME>
// Copyright (c) Pixel Doctrine
#pragma once
#include "geometrybase.h"
#include "../../lepra/include/transformation.h"
namespace tbc {
// Proxy geometry that forwards buffer accessors to a wrapped parent
// GeometryBase while carrying its own offset transformations.
class GeometryReference: public GeometryBase {
	typedef GeometryBase Parent;
public:
	// Wraps the given geometry; ownership semantics are defined in the .cpp
	// (not visible here) -- confirm before assuming the pointer is owned.
	GeometryReference(GeometryBase* geometry);
	virtual ~GeometryReference();

	// Overridden identity query (see GeometryBase).
	bool IsGeometryReference();

	// Base offset relative to the parent geometry.
	const xform& GetOffsetTransformation() const;
	void SetOffsetTransformation(const xform& offset);
	void AddOffset(const vec3& offset);

	// Secondary offset applied in addition to the base offset.
	const xform& GetExtraOffsetTransformation() const;
	void SetExtraOffsetTransformation(const xform& offset);

	// Overloads from GeometryBase.
	const xform& GetTransformation();
	GeometryVolatility GetGeometryVolatility() const;
	void SetGeometryVolatility(GeometryVolatility volatility);
	unsigned int GetMaxVertexCount() const;
	unsigned int GetMaxIndexCount() const;
	unsigned int GetVertexCount() const;
	unsigned int GetIndexCount() const;
	unsigned int GetUVSetCount() const;
	float* GetVertexData() const;
	float* GetUVData(unsigned int uv_set) const;
	vtx_idx_t* GetIndexData() const;
	uint8* GetColorData() const;
	float* GetNormalData() const;
	GeometryBase* GetParentGeometry() const;

protected:
private:
	GeometryBase* geometry_;        // wrapped parent geometry
	xform original_offset_;         // base offset
	xform extra_offset_;            // extra offset on top of the base one
	// Presumably the storage backing GetTransformation()'s returned
	// reference -- confirm in the .cpp.
	xform return_transformation_;
};
}
|
nyctophiliacme/edtech-backend | questions/migrations/0001_initial.py | # Generated by Django 3.0.6 on 2020-05-27 14:12
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the questions app.

    Creates the Question and QuestionChoice tables plus the
    QuestionChapterMapping join table linking questions to chapters.
    Prefer generating a new migration over hand-editing this one.
    """

    # First migration of this app.
    initial = True

    # Requires the chapters app's tables for the Chapter foreign key below.
    dependencies = [
        ('chapters', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question_text', models.TextField()),
                ('question_title', models.TextField(blank=True, null=True)),
                ('question_type', models.CharField(blank=True, max_length=100, null=True)),
                ('question_img_url', models.TextField(blank=True, null=True)),
                ('difficulty_level', models.CharField(choices=[('easy', 'Easy'), ('moderate', 'Moderate'), ('difficult', 'Difficult')], db_index=True, default='moderate', max_length=50)),
                ('time_to_solve', models.IntegerField(blank=True, null=True)),
                ('answer_selection_type', models.CharField(db_index=True, default='single_choice', max_length=50)),
                ('explanation', models.TextField(blank=True, null=True)),
                ('explanation_img_url', models.TextField(blank=True, null=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('modified_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='QuestionChoice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('choice_text', models.TextField()),
                ('choice_img_url', models.TextField(blank=True, null=True)),
                ('is_right_choice', models.BooleanField(default=False)),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='questions.Question')),
            ],
        ),
        migrations.CreateModel(
            name='QuestionChapterMapping',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('chapter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chapters.Chapter')),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='questions.Question')),
            ],
            options={
                # A question may map to each chapter at most once.
                'unique_together': {('question', 'chapter')},
            },
        ),
    ]
|
lhf974941160211/Latin | library/src/android/support/v7/internal/view/ActionModeWrapper.java | /*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v7.internal.view;
import android.content.Context;
import android.support.v7.internal.view.menu.MenuWrapperFactory;
import android.support.v7.view.ActionMode;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.View;
/**
* @hide
*/
/**
 * Adapts a framework {@link android.view.ActionMode} to the support-library
 * {@link ActionMode} API: every call is delegated to the wrapped framework
 * instance, with Menu/MenuItem arguments re-wrapped via MenuWrapperFactory.
 *
 * @hide
 */
public class ActionModeWrapper extends ActionMode {

    final MenuInflater mInflater;
    final android.view.ActionMode mWrappedObject; // framework mode being wrapped

    public ActionModeWrapper(Context context, android.view.ActionMode frameworkActionMode) {
        mWrappedObject = frameworkActionMode;
        mInflater = new SupportMenuInflater(context);
    }

    @Override
    public Object getTag() {
        return mWrappedObject.getTag();
    }

    @Override
    public void setTag(Object tag) {
        mWrappedObject.setTag(tag);
    }

    @Override
    public boolean getTitleOptionalHint() {
        return mWrappedObject.getTitleOptionalHint();
    }

    @Override
    public void setTitleOptionalHint(boolean titleOptional) {
        mWrappedObject.setTitleOptionalHint(titleOptional);
    }

    @Override
    public boolean isTitleOptional() {
        return mWrappedObject.isTitleOptional();
    }

    @Override
    public void setTitle(CharSequence title) {
        mWrappedObject.setTitle(title);
    }

    @Override
    public void setSubtitle(CharSequence subtitle) {
        mWrappedObject.setSubtitle(subtitle);
    }

    @Override
    public void invalidate() {
        mWrappedObject.invalidate();
    }

    @Override
    public void finish() {
        mWrappedObject.finish();
    }

    @Override
    public Menu getMenu() {
        // Re-wrap the framework menu so callers see the support Menu type.
        return MenuWrapperFactory.createMenuWrapper(mWrappedObject.getMenu());
    }

    @Override
    public CharSequence getTitle() {
        return mWrappedObject.getTitle();
    }

    @Override
    public void setTitle(int resId) {
        mWrappedObject.setTitle(resId);
    }

    @Override
    public CharSequence getSubtitle() {
        return mWrappedObject.getSubtitle();
    }

    @Override
    public void setSubtitle(int resId) {
        mWrappedObject.setSubtitle(resId);
    }

    @Override
    public View getCustomView() {
        return mWrappedObject.getCustomView();
    }

    @Override
    public void setCustomView(View view) {
        mWrappedObject.setCustomView(view);
    }

    @Override
    public MenuInflater getMenuInflater() {
        return mInflater;
    }

    /**
     * Adapts a support-library {@link Callback} so it can be handed to the
     * framework's startActionMode(); each framework callback is forwarded
     * with its ActionMode and Menu arguments wrapped.
     *
     * @hide
     */
    public static class CallbackWrapper implements android.view.ActionMode.Callback {
        final Callback mWrappedCallback;
        final Context mContext;

        // Cached wrapper for the most recently started mode, so repeated
        // callbacks reuse one ActionModeWrapper instance.
        private ActionModeWrapper mLastStartedActionMode;

        public CallbackWrapper(Context context, Callback supportCallback) {
            mContext = context;
            mWrappedCallback = supportCallback;
        }

        @Override
        public boolean onCreateActionMode(android.view.ActionMode mode, android.view.Menu menu) {
            return mWrappedCallback.onCreateActionMode(getActionModeWrapper(mode),
                    MenuWrapperFactory.createMenuWrapper(menu));
        }

        @Override
        public boolean onPrepareActionMode(android.view.ActionMode mode, android.view.Menu menu) {
            return mWrappedCallback.onPrepareActionMode(getActionModeWrapper(mode),
                    MenuWrapperFactory.createMenuWrapper(menu));
        }

        @Override
        public boolean onActionItemClicked(android.view.ActionMode mode,
                android.view.MenuItem item) {
            return mWrappedCallback.onActionItemClicked(getActionModeWrapper(mode),
                    MenuWrapperFactory.createMenuItemWrapper(item));
        }

        @Override
        public void onDestroyActionMode(android.view.ActionMode mode) {
            mWrappedCallback.onDestroyActionMode(getActionModeWrapper(mode));
        }

        public void setLastStartedActionMode(ActionModeWrapper modeWrapper) {
            mLastStartedActionMode = modeWrapper;
        }

        private ActionMode getActionModeWrapper(android.view.ActionMode mode) {
            if (mLastStartedActionMode != null && mLastStartedActionMode.mWrappedObject == mode) {
                // If the given mode equals our wrapped mode, just return it
                return mLastStartedActionMode;
            } else {
                return new ActionModeWrapper(mContext, mode);
            }
        }
    }
}
|
GarryLance/GSKit | GSKit/UIKit/CollectionView/GSCollectionViewItemModel.h | //
// GSCollectionViewItemModel.h
// GSKitDemo
//
// Created by OSU on 16/7/28.
// Copyright © 2016年 GarryLance. All rights reserved.
//
#import "GSModelBase.h"
// Model backing a GSCollectionView item: an image (bundled name or remote
// URL, with a placeholder) plus an optionally-attributed title.
@interface GSCollectionViewItemModel : GSModelBase

// Name of the local placeholder image shown while imageUrl loads.
@property (copy, nonatomic) NSString * placeholderImageName;
// Name of a bundled image to display.
@property (copy, nonatomic) NSString * imageName;
// Remote image URL; presumably used in preference to imageName when set --
// confirm in the consuming cell code.
@property (copy, nonatomic) NSURL * imageUrl;
// Plain title text.
@property (copy, nonatomic) NSString * title;
// Presumably NSAttributedString attributes applied to the title -- TODO confirm.
@property (copy, nonatomic) NSDictionary * titleAttributes;

@end
|
cwiki-us/Java-Tutorial | core-java-modules/core-java-8/src/main/java/com/ossez/strategy/Discounter.java | <filename>core-java-modules/core-java-8/src/main/java/com/ossez/strategy/Discounter.java
package com.ossez.strategy;
import java.math.BigDecimal;
import java.util.function.UnaryOperator;
public interface Discounter extends UnaryOperator<BigDecimal> {
default Discounter combine(Discounter after) {
return value -> after.apply(this.apply(value));
}
static Discounter christmas() {
return (amount) -> amount.multiply(BigDecimal.valueOf(0.9));
}
static Discounter newYear() {
return (amount) -> amount.multiply(BigDecimal.valueOf(0.8));
}
static Discounter easter() {
return (amount) -> amount.multiply(BigDecimal.valueOf(0.5));
}
}
|
benety/mongo | src/mongo/util/net/openssl_init.cpp | <reponame>benety/mongo<gh_stars>0
/**
* Copyright (C) 2019-present MongoDB, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the Server Side Public License, version 1,
* as published by MongoDB, Inc.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Server Side Public License for more details.
*
* You should have received a copy of the Server Side Public License
* along with this program. If not, see
* <http://www.mongodb.com/licensing/server-side-public-license>.
*
* As a special exception, the copyright holders give permission to link the
* code of portions of this program with the OpenSSL library under certain
* conditions as described in each individual source file and distribute
* linked combinations including the program with the OpenSSL library. You
* must comply with the Server Side Public License in all respects for
* all of the code used other than as permitted herein. If you modify file(s)
* with this exception, you may extend this exception to your version of the
* file(s), but you are not obligated to do so. If you do not wish to do so,
* delete this exception statement from your version. If you delete this
* exception statement from all source files in the program, then also delete
* it in the license file.
*/
#include "mongo/platform/basic.h"
#include "mongo/base/init.h"
#include "mongo/config.h"
#include "mongo/logv2/log.h"
#include "mongo/platform/mutex.h"
#include "mongo/stdx/mutex.h"
#include "mongo/util/net/ssl_manager.h"
#include "mongo/util/net/ssl_options.h"
#include "mongo/util/scopeguard.h"
#include <memory>
#include <openssl/err.h>
#include <openssl/ssl.h>
#include <stack>
#include <vector>
#define MONGO_LOGV2_DEFAULT_COMPONENT ::mongo::logv2::LogComponent::kNetwork
namespace mongo {
namespace {
/**
* Multithreaded Support for SSL.
*
* In order to allow OpenSSL to work in a multithreaded environment, you
* may need to provide some callbacks for it to use for locking. The following code
* sets up a vector of mutexes and provides a thread unique ID number.
* The so-called SSLThreadInfo class encapsulates most of the logic required for
* OpenSSL multithreaded support.
*
* OpenSSL before version 1.1.0 requires applications provide a callback which emits a thread
* identifier. This ID is used to store thread specific ERR information. When a thread is
* terminated, it must call ERR_remove_state or ERR_remove_thread_state. These functions may
* themselves invoke the application provided callback. These IDs are stored in a hashtable with
* a questionable hash function. They must be uniformly distributed to prevent collisions.
*/
/**
 * Static-only holder for the callbacks pre-1.1.0 OpenSSL needs in a
 * multithreaded process: a per-thread ID provider (getID) and a locking
 * callback backed by a pool of recursive mutexes. Never instantiated.
 */
class SSLThreadInfo {
public:
    // Returns a process-unique ID for the calling thread, allocating one on
    // first use and releasing it (for reuse) when the thread exits.
    static unsigned long getID() {
        /** A handle for the threadID resource. */
        struct ManagedId {
            ~ManagedId() {
                idManager().releaseID(id);
            }
            const unsigned long id = idManager().reserveID();
        };

        // The `guard` callback will cause an invocation of `getID`, so it must be destroyed first.
        static thread_local ManagedId managedId;
        static thread_local ScopeGuard guard([] { ERR_remove_state(0); });
        return managedId.id;
    }

    // OpenSSL locking callback: locks or unlocks mutex number `type`
    // depending on whether `mode` carries the CRYPTO_LOCK bit. `file`/`line`
    // identify the OpenSSL call site and are unused here.
    static void lockingCallback(int mode, int type, const char* file, int line) {
        if (mode & CRYPTO_LOCK) {
            mutexes()[type]->lock();
        } else {
            mutexes()[type]->unlock();
        }
    }

    // Registers the two callbacks with OpenSSL and grows the mutex pool to
    // the CRYPTO_num_locks() entries OpenSSL expects.
    static void init() {
        CRYPTO_set_id_callback(&SSLThreadInfo::getID);
        CRYPTO_set_locking_callback(&SSLThreadInfo::lockingCallback);

        while ((int)mutexes().size() < CRYPTO_num_locks()) {
            mutexes().emplace_back(std::make_unique<stdx::recursive_mutex>());
        }
    }

private:
    SSLThreadInfo() = delete;

    // Note: see SERVER-8734 for why we are using a recursive mutex here.
    // Once the deadlock fix in OpenSSL is incorporated into most distros of
    // Linux, this can be changed back to a nonrecursive mutex.
    static std::vector<std::unique_ptr<stdx::recursive_mutex>>& mutexes() {
        // Keep the static as a pointer to avoid it ever to be destroyed. It is referenced in the
        // CallErrRemoveState thread local above.
        static auto m = new std::vector<std::unique_ptr<stdx::recursive_mutex>>();
        return *m;
    }

    // Allocates dense thread IDs and recycles released ones via a free stack,
    // keeping the ID space small for OpenSSL's internal ID hashtable.
    class ThreadIDManager {
    public:
        unsigned long reserveID() {
            stdx::unique_lock<Latch> lock(_idMutex);
            if (!_idLast.empty()) {
                // Prefer reusing a previously released ID.
                unsigned long ret = _idLast.top();
                _idLast.pop();
                return ret;
            }
            return ++_idNext;
        }

        void releaseID(unsigned long id) {
            stdx::unique_lock<Latch> lock(_idMutex);
            _idLast.push(id);
        }

    private:
        // Machinery for producing IDs that are unique for the life of a thread.
        Mutex _idMutex =
            MONGO_MAKE_LATCH("ThreadIDManager::_idMutex");  // Protects _idNext and _idLast.
        unsigned long _idNext = 0;  // Stores the next thread ID to use, if none already allocated.
        std::stack<unsigned long, std::vector<unsigned long>>
            _idLast;  // Stores old thread IDs, for reuse.
    };

    // Intentionally leaked singleton so it outlives thread-local destructors.
    static ThreadIDManager& idManager() {
        static auto& m = *new ThreadIDManager();
        return m;
    }
};
// Enables FIPS 140-2 mode when requested by configuration. Aborts process
// startup (fassertFailedNoTrace) if FIPS cannot be enabled or if this binary
// was built without FIPS support, rather than continuing insecurely.
void setupFIPS() {
// Turn on FIPS mode if requested, OPENSSL_FIPS must be defined by the OpenSSL headers
#if defined(MONGO_CONFIG_HAVE_FIPS_MODE_SET)
    int status = FIPS_mode_set(1);
    if (!status) {
        // FIPS_mode_set failed; log the OpenSSL error detail and abort.
        LOGV2_FATAL(23173,
                    "can't activate FIPS mode: {error}",
                    "Can't activate FIPS mode",
                    "error"_attr = SSLManagerInterface::getSSLErrorMessage(ERR_get_error()));
        fassertFailedNoTrace(16703);
    }
    LOGV2(23172, "FIPS 140-2 mode activated");
#else
    // Requesting FIPS on a non-FIPS build is a fatal configuration error,
    // not a silent no-op.
    LOGV2_FATAL(23174, "this version of mongodb was not compiled with FIPS support");
    fassertFailedNoTrace(17089);
#endif
}
// Process-startup initializer (ordered relative to the "default" and
// "CryptographyInitialized" initializer groups) that performs one-time
// OpenSSL setup: library init, error strings, optional FIPS mode, the
// digest/cipher tables, and the multithreading callbacks above.
MONGO_INITIALIZER_GENERAL(SetupOpenSSL, ("default"), ("CryptographyInitialized"))
(InitializerContext*) {
    SSL_library_init();
    SSL_load_error_strings();
    ERR_load_crypto_strings();

    if (sslGlobalParams.sslFIPSMode) {
        setupFIPS();
    }

    // Add all digests and ciphers to OpenSSL's internal table
    // so that encryption/decryption is backwards compatible
    OpenSSL_add_all_algorithms();

    // Setup OpenSSL multithreading callbacks and mutexes
    SSLThreadInfo::init();
}
} // namespace
} // namespace mongo
|
profxj/ginga | ginga/canvas/CompoundMixin.py | #
# CompoundMixin.py -- enable compound capabilities.
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import sys
import traceback
from functools import reduce
import numpy as np
from ginga.canvas import coordmap
__all__ = ['CompoundMixin']
class CompoundMixin(object):
    """A CompoundMixin is a mixin class that makes an object that is an
    aggregation of other objects.

    It is used to make generic compound drawing types as well as (for example)
    layers of canvases on top of an image.
    """

    def __init__(self):
        # holds a list of objects to be drawn
        self.objects = []
        if not hasattr(self, 'crdmap'):
            self.crdmap = None
        if not hasattr(self, 'coord'):
            self.coord = None
        # if True, this compound is treated as a single unit for hit-testing
        self.opaque = False
        # reduction used to combine per-child hit-test results
        self._contains_reduce = np.logical_or

    def get_llur(self):
        """
        Get lower-left and upper-right coordinates of the bounding box
        of this compound object.

        Returns
        -------
        x1, y1, x2, y2: a 4-tuple of the lower-left and upper-right coords
        """
        points = np.array([obj.get_llur() for obj in self.objects])
        t_ = points.T
        x1, y1 = t_[0].min(), t_[1].min()
        x2, y2 = t_[2].max(), t_[3].max()
        return (x1, y1, x2, y2)

    def get_edit_points(self, viewer):
        """Return the four corners of the bounding box as edit handles."""
        x1, y1, x2, y2 = self.get_llur()
        return [(x1, y1), (x2, y1), (x2, y2), (x1, y2)]

    def contains_pts(self, pts):
        """Return a boolean array: True where a point is inside any child.

        With no children, returns an all-False array of matching shape.
        """
        if len(self.objects) == 0:
            x_arr, y_arr = np.asarray(pts).T
            # FIX: use builtin `bool` -- the `np.bool` alias was removed in
            # NumPy 1.24 and raises AttributeError there.
            return np.full(x_arr.shape, False, dtype=bool)
        return reduce(self._contains_reduce,
                      map(lambda obj: obj.contains_pts(pts), self.objects))

    def get_items_at(self, pt):
        """Return the list of child objects containing point `pt`.

        Non-opaque compound children are recursed into (the compound itself
        is not listed); other children are listed if they contain the point.
        """
        res = []
        for obj in self.objects:
            if obj.is_compound() and not obj.opaque:
                # non-opaque compound object, list up compatible members
                res.extend(obj.get_items_at(pt))
            elif obj.contains_pt(pt):
                res.append(obj)
        return res

    def get_objects_by_kind(self, kind):
        """Return an iterator over children whose kind equals `kind`."""
        return filter(lambda obj: obj.kind == kind, self.objects)

    def get_objects_by_kinds(self, kinds):
        """Return an iterator over children whose kind is in `kinds`."""
        return filter(lambda obj: obj.kind in kinds, self.objects)

    def select_contains_pt(self, viewer, pt):
        """Return True if any child's selection area contains point `pt`."""
        for obj in self.objects:
            if obj.select_contains_pt(viewer, pt):
                return True
        return False

    def select_items_at(self, viewer, pt, test=None):
        """Return the children selectable at point `pt`.

        Non-opaque compounds are recursed into and are also candidates
        themselves.  If `test` is given, it is called as
        ``test(obj, pt, is_inside)`` and its result decides inclusion.
        On any error, logs a traceback and returns an empty list.
        """
        res = []
        try:
            for obj in self.objects:
                if obj.is_compound() and not obj.opaque:
                    # non-opaque compound object, list up compatible members
                    res.extend(obj.select_items_at(viewer, pt, test=test))

                is_inside = obj.select_contains_pt(viewer, pt)
                if test is None:
                    if is_inside:
                        res.append(obj)
                elif test(obj, pt, is_inside):
                    # custom test
                    res.append(obj)

        except Exception:
            try:
                # log traceback, if possible (avoid shadowing builtin `type`)
                _etype, _evalue, tb = sys.exc_info()
                tb_str = "".join(traceback.format_tb(tb))
                self.logger.error("Traceback:\n%s" % (tb_str))
            except Exception:
                tb_str = "Traceback information unavailable."
                self.logger.error(tb_str)
            res = []
        return res

    def initialize(self, canvas, viewer, logger):
        """Attach viewer/logger and resolve a coordinate map, then
        initialize all children.
        """
        # TODO: this needs to be merged with the code in CanvasObject
        self.viewer = viewer
        self.logger = logger

        if self.crdmap is None:
            if self.coord == 'offset':
                self.crdmap = coordmap.OffsetMapper(viewer, self.ref_obj)
            else:
                try:
                    self.crdmap = viewer.get_coordmap(self.coord)
                except Exception:
                    # last best effort--a generic data mapper
                    self.crdmap = coordmap.DataMapper(viewer)

        # initialize children
        for obj in self.objects:
            obj.initialize(canvas, viewer, logger)

    def inherit_from(self, obj):
        """Copy crdmap, logger and viewer from another object."""
        self.crdmap = obj.crdmap
        self.logger = obj.logger
        self.viewer = obj.viewer

    def is_compound(self):
        """Compound objects always report True."""
        return True

    def use_coordmap(self, mapobj):
        """Propagate a coordinate map to all children."""
        for obj in self.objects:
            obj.use_coordmap(mapobj)

    def draw(self, viewer):
        """Draw all children, in list order."""
        for obj in self.objects:
            obj.draw(viewer)

    def get_objects(self):
        """Return the (live) list of child objects."""
        return self.objects

    def has_object(self, obj):
        """Return True if `obj` is a direct child."""
        return obj in self.objects

    def delete_object(self, obj):
        """Remove `obj` from the children; raises ValueError if absent."""
        self.objects.remove(obj)

    def delete_objects(self, objects):
        """Remove each object in `objects` from the children."""
        for obj in objects:
            self.delete_object(obj)

    def delete_all_objects(self):
        """Empty the child list in place (preserving list identity)."""
        self.objects[:] = []

    def roll_objects(self, n):
        """Rotate the child list by `n` positions (no-op when empty)."""
        num = len(self.objects)
        if num == 0:
            return
        n = n % num
        self.objects = self.objects[-n:] + self.objects[:-n]

    def swap_objects(self):
        """Swap the last two children (no-op with fewer than two)."""
        num = len(self.objects)
        if num >= 2:
            objs = self.objects
            self.objects = objs[:num - 2] + [objs[num - 1], objs[num - 2]]

    def set_attr_all(self, **kwdargs):
        """Set the given attributes on every child that already has them."""
        for obj in self.objects:
            for attrname, val in kwdargs.items():
                if hasattr(obj, attrname):
                    setattr(obj, attrname, val)

    def add_object(self, obj, belowThis=None):
        """Initialize and append `obj`, or insert it below `belowThis`."""
        obj.initialize(self, self.viewer, self.logger)
        if belowThis is None:
            self.objects.append(obj)
        else:
            index = self.objects.index(belowThis)
            self.objects.insert(index, obj)

    def raise_object(self, obj, aboveThis=None):
        """Move `obj` to the top, or just above `aboveThis`."""
        if aboveThis is None:
            # no reference object--move to top
            self.objects.remove(obj)
            self.objects.append(obj)
        else:
            # Force an error if the reference object doesn't exist in list
            index = self.objects.index(aboveThis)
            self.objects.remove(obj)
            index = self.objects.index(aboveThis)
            self.objects.insert(index + 1, obj)

    def lower_object(self, obj, belowThis=None):
        """Move `obj` to the bottom, or just below `belowThis`."""
        if belowThis is None:
            # no reference object--move to bottom
            self.objects.remove(obj)
            self.objects.insert(0, obj)
        else:
            # Force an error if the reference object doesn't exist in list
            index = self.objects.index(belowThis)
            self.objects.remove(obj)
            index = self.objects.index(belowThis)
            self.objects.insert(index, obj)

    def rotate(self, theta, xoff=0, yoff=0):
        """Rotate all children by `theta` about (xoff, yoff)."""
        for obj in self.objects:
            obj.rotate(theta, xoff=xoff, yoff=yoff)

    def move_delta_pt(self, off_pt):
        """Translate all children by the offset point."""
        for obj in self.objects:
            obj.move_delta_pt(off_pt)

    def scale_by_factors(self, factors):
        """Scale all children by the given factors."""
        for obj in self.objects:
            obj.scale_by_factors(factors)

    def get_reference_pt(self):
        """Return the average of all children's reference points."""
        points = np.asarray([obj.get_reference_pt()
                             for obj in self.objects])
        t_ = points.T
        x, y = np.average(t_[0]), np.average(t_[1])
        return (x, y)

    get_center_pt = get_reference_pt

    def reorder_layers(self):
        """Sort children by their `_zorder` (default 0), recursing into
        compound children.
        """
        self.objects.sort(key=lambda obj: getattr(obj, '_zorder', 0))
        for obj in self.objects:
            if obj.is_compound():
                obj.reorder_layers()

    def get_points(self):
        """Return the concatenation of all children's point lists."""
        res = []
        for obj in self.objects:
            res.extend(list(obj.get_points()))
        return res

    ### NON-PEP8 EQUIVALENTS -- TO BE DEPRECATED ###
    getItemsAt = get_items_at
    getObjects = get_objects
    deleteObject = delete_object
    deleteObjects = delete_objects
    deleteAllObjects = delete_all_objects
    setAttrAll = set_attr_all
    addObject = add_object
    raiseObject = raise_object
    lowerObject = lower_object
|
consulo/consulo-csharp | csharp-psi-impl/src/main/java/consulo/csharp/lang/lexer/CSharpLexer.java | /*
* Copyright 2013-2017 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consulo.csharp.lang.lexer;
import com.intellij.lexer.Lexer;
import com.intellij.lexer.LexerPosition;
import com.intellij.lexer.MergeFunction;
import com.intellij.lexer.MergingLexerAdapterBase;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import consulo.csharp.cfs.lang.CfsTokens;
import consulo.csharp.lang.psi.CSharpTemplateTokens;
import consulo.csharp.lang.psi.CSharpTokens;
import consulo.csharp.lang.psi.CSharpTokensImpl;
/**
* @author VISTALL
* @since 22.11.13.
*/
public class CSharpLexer extends MergingLexerAdapterBase
{
	// Token types whose consecutive runs are collapsed into a single token.
	private static final TokenSet ourMergeSet = TokenSet.create(CSharpTemplateTokens.PREPROCESSOR_FRAGMENT, CSharpTokensImpl.LINE_DOC_COMMENT, CSharpTokensImpl.INTERPOLATION_STRING_LITERAL,
			CfsTokens.FORMAT);

	/**
	 * Merge function that collapses runs of tokens from {@link #ourMergeSet}.
	 * For LINE_DOC_COMMENT it additionally bridges a single whitespace token
	 * between two doc-comment lines, so a multi-line doc comment becomes one
	 * merged token.
	 */
	private static class MyMergeFunction implements MergeFunction
	{
		private MyMergeFunction()
		{
		}

		@Override
		public IElementType merge(final IElementType mergeToken, final Lexer originalLexer)
		{
			// Tokens outside the merge set pass through unchanged.
			if(!ourMergeSet.contains(mergeToken))
			{
				return mergeToken;
			}

			while(true)
			{
				IElementType currentToken = originalLexer.getTokenType();
				if(currentToken == null)
				{
					break;
				}

				// we need merge two docs if one line between
				if(mergeToken == CSharpTokensImpl.LINE_DOC_COMMENT && currentToken == CSharpTokens.WHITE_SPACE)
				{
					// Peek past the whitespace without committing: save the
					// lexer position, advance, then restore.
					LexerPosition currentPosition = originalLexer.getCurrentPosition();
					originalLexer.advance();
					boolean docIsNext = originalLexer.getTokenType() == CSharpTokensImpl.LINE_DOC_COMMENT;
					originalLexer.restore(currentPosition);
					if(docIsNext)
					{
						// Treat the whitespace as part of the doc comment run.
						currentToken = CSharpTokensImpl.LINE_DOC_COMMENT;
					}
					else
					{
						break;
					}
				}

				if(currentToken != mergeToken)
				{
					break;
				}

				// Consume the token into the merged run.
				originalLexer.advance();
			}
			return mergeToken;
		}
	}

	private final MyMergeFunction myMergeFunction;

	// Default constructor: wraps the generated flex lexer (non-doc mode).
	public CSharpLexer()
	{
		this(new _CSharpLexer(false));
	}

	public CSharpLexer(Lexer lexer)
	{
		super(lexer);
		myMergeFunction = new MyMergeFunction();
	}

	@Override
	public MergeFunction getMergeFunction()
	{
		return myMergeFunction;
	}
}
|
JVVJV/FreeRTOS | FreeRTOS/Demo/CORTEX_STM32L152_IAR/system_and_ST_code/STM32L1xx_StdPeriph_Driver/src/stm32l1xx_rcc.c | /**
******************************************************************************
* @file stm32l1xx_rcc.c
* @author MCD Application Team
* @version V1.0.0RC1
* @date 07/02/2010
* @brief This file provides all the RCC firmware functions.
******************************************************************************
* @copy
*
* THE PRESENT FIRMWARE WHICH IS FOR GUIDANCE ONLY AIMS AT PROVIDING CUSTOMERS
* WITH CODING INFORMATION REGARDING THEIR PRODUCTS IN ORDER FOR THEM TO SAVE
* TIME. AS A RESULT, STMICROELECTRONICS SHALL NOT BE HELD LIABLE FOR ANY
* DIRECT, INDIRECT OR CONSEQUENTIAL DAMAGES WITH RESPECT TO ANY CLAIMS ARISING
* FROM THE CONTENT OF SUCH FIRMWARE AND/OR THE USE MADE BY CUSTOMERS OF THE
* CODING INFORMATION CONTAINED HEREIN IN CONNECTION WITH THEIR PRODUCTS.
*
* <h2><center>© COPYRIGHT 2010 STMicroelectronics</center></h2>
*/
/* Includes ------------------------------------------------------------------*/
#include "stm32l1xx_rcc.h"
/** @addtogroup STM32L1xx_StdPeriph_Driver
* @{
*/
/** @defgroup RCC
* @brief RCC driver modules
* @{
*/
/** @defgroup RCC_Private_TypesDefinitions
* @{
*/
/**
* @}
*/
/** @defgroup RCC_Private_Defines
* @{
*/
/* ------------ RCC registers bit address in the alias region ----------- */
#define RCC_OFFSET (RCC_BASE - PERIPH_BASE)
/* --- CR Register ---*/
/* Alias word address of HSION bit */
#define CR_OFFSET (RCC_OFFSET + 0x00)
#define HSION_BitNumber 0x00
#define CR_HSION_BB (PERIPH_BB_BASE + (CR_OFFSET * 32) + (HSION_BitNumber * 4))
/* Alias word address of MSION bit */
#define MSION_BitNumber 0x08
#define CR_MSION_BB (PERIPH_BB_BASE + (CR_OFFSET * 32) + (MSION_BitNumber * 4))
/* Alias word address of PLLON bit */
#define PLLON_BitNumber 0x18
#define CR_PLLON_BB (PERIPH_BB_BASE + (CR_OFFSET * 32) + (PLLON_BitNumber * 4))
/* Alias word address of CSSON bit */
#define CSSON_BitNumber 0x1C
#define CR_CSSON_BB (PERIPH_BB_BASE + (CR_OFFSET * 32) + (CSSON_BitNumber * 4))
/* --- CSR Register ---*/
/* Alias word address of LSION bit */
#define CSR_OFFSET (RCC_OFFSET + 0x34)
#define LSION_BitNumber 0x00
#define CSR_LSION_BB (PERIPH_BB_BASE + (CSR_OFFSET * 32) + (LSION_BitNumber * 4))
/* Alias word address of RTCEN bit */
#define RTCEN_BitNumber 0x16
#define CSR_RTCEN_BB (PERIPH_BB_BASE + (CSR_OFFSET * 32) + (RTCEN_BitNumber * 4))
/* Alias word address of RTCRST bit */
#define RTCRST_BitNumber 0x17
#define CSR_RTCRST_BB (PERIPH_BB_BASE + (CSR_OFFSET * 32) + (RTCRST_BitNumber * 4))
/* ---------------------- RCC registers mask -------------------------------- */
/* RCC Flag Mask */
#define FLAG_MASK ((uint8_t)0x1F)
/* CR register byte 3 (Bits[23:16]) base address */
#define CR_BYTE3_ADDRESS ((uint32_t)0x40023802)
/* ICSCR register byte 4 (Bits[31:24]) base address */
#define ICSCR_BYTE4_ADDRESS ((uint32_t)0x40023807)
/* CFGR register byte 3 (Bits[23:16]) base address */
#define CFGR_BYTE3_ADDRESS ((uint32_t)0x4002380A)
/* CFGR register byte 4 (Bits[31:24]) base address */
#define CFGR_BYTE4_ADDRESS ((uint32_t)0x4002380B)
/* CIR register byte 2 (Bits[15:8]) base address */
#define CIR_BYTE2_ADDRESS ((uint32_t)0x4002380D)
/* CIR register byte 3 (Bits[23:16]) base address */
#define CIR_BYTE3_ADDRESS ((uint32_t)0x4002380E)
/* CSR register byte 2 (Bits[15:8]) base address */
#define CSR_BYTE2_ADDRESS ((uint32_t)0x40023835)
/**
* @}
*/
/** @defgroup RCC_Private_Macros
* @{
*/
/**
* @}
*/
/** @defgroup RCC_Private_Variables
* @{
*/
static __I uint8_t PLLMulTable[9] = {3, 4, 6, 8, 12, 16, 24, 32, 48};
static __I uint8_t APBAHBPrescTable[16] = {0, 0, 0, 0, 1, 2, 3, 4, 1, 2, 3, 4, 6, 7, 8, 9};
static __I uint8_t MSITable[7] = {0, 0, 0, 0, 1, 2, 4};
/**
* @}
*/
/** @defgroup RCC_Private_FunctionPrototypes
* @{
*/
/**
* @}
*/
/** @defgroup RCC_Private_Functions
* @{
*/
/**
* @brief Resets the RCC clock configuration to the default reset state.
* @param None
* @retval None
*/
void RCC_DeInit(void)
{
  /* Set MSION bit: MSI becomes the running oscillator (reset default) */
  RCC->CR |= (uint32_t)0x00000100;

  /* Reset SW[1:0], HPRE[3:0], PPRE1[2:0], PPRE2[2:0], MCOSEL[2:0] and MCOPRE[2:0] bits */
  RCC->CFGR &= (uint32_t)0x88FFC00C;

  /* Reset HSION, HSEON, CSSON and PLLON bits */
  RCC->CR &= (uint32_t)0xEEFEFFFE;

  /* Reset HSEBYP bit */
  RCC->CR &= (uint32_t)0xFFFBFFFF;

  /* Reset PLLSRC, PLLMUL[3:0] and PLLDIV[1:0] bits */
  RCC->CFGR &= (uint32_t)0xFF02FFFF;

  /* Disable all interrupts (clear every enable bit in CIR) */
  RCC->CIR = 0x00000000;
}
/**
* @brief Configures the External High Speed oscillator (HSE).
* @note HSE can not be stopped if it is used directly or through the PLL as system clock.
* @param RCC_HSE: specifies the new state of the HSE.
* This parameter can be one of the following values:
* @arg RCC_HSE_OFF: HSE oscillator OFF
* @arg RCC_HSE_ON: HSE oscillator ON
* @arg RCC_HSE_Bypass: HSE oscillator bypassed with external clock
* @retval None
*/
void RCC_HSEConfig(uint8_t RCC_HSE)
{
  /* Check the parameters */
  assert_param(IS_RCC_HSE(RCC_HSE));

  /* Both writes go through a byte alias of CR bits [23:16], so only the
     HSE control bits are touched. */
  /* Reset HSEON and HSEBYP bits before configuring the HSE ------------------*/
  *(__IO uint8_t *) CR_BYTE3_ADDRESS = RCC_HSE_OFF;

  /* Set the new HSE configuration -------------------------------------------*/
  *(__IO uint8_t *) CR_BYTE3_ADDRESS = RCC_HSE;
}
/**
* @brief Waits for HSE start-up.
* @param None
* @retval An ErrorStatus enumuration value:
* - SUCCESS: HSE oscillator is stable and ready to use
* - ERROR: HSE oscillator not yet ready
*/
ErrorStatus RCC_WaitForHSEStartUp(void)
{
__IO uint32_t StartUpCounter = 0;
ErrorStatus status = ERROR;
FlagStatus HSEStatus = RESET;
/* Wait till HSE is ready and if Time out is reached exit */
do
{
HSEStatus = RCC_GetFlagStatus(RCC_FLAG_HSERDY);
StartUpCounter++;
} while((StartUpCounter != HSE_STARTUP_TIMEOUT) && (HSEStatus == RESET));
if (RCC_GetFlagStatus(RCC_FLAG_HSERDY) != RESET)
{
status = SUCCESS;
}
else
{
status = ERROR;
}
return (status);
}
/**
* @brief Adjusts the Internal High Speed oscillator (HSI) calibration value.
* @param HSICalibrationValue: specifies the HSI calibration trimming value.
* This parameter must be a number between 0 and 0x1F.
* @retval None
*/
/* Writes HSICalibrationValue into the HSITRIM[4:0] field (ICSCR bits 12:8),
   leaving the rest of ICSCR untouched. */
void RCC_AdjustHSICalibrationValue(uint8_t HSICalibrationValue)
{
  /* Check the parameters */
  assert_param(IS_RCC_HSI_CALIBRATION_VALUE(HSICalibrationValue));

  /* Single read-modify-write: clear HSITRIM[4:0], then insert the new
     trim value shifted into position. */
  RCC->ICSCR = (RCC->ICSCR & ~RCC_ICSCR_HSITRIM) |
               ((uint32_t)HSICalibrationValue << 8);
}
/**
* @brief Adjusts the Internal Multi Speed oscillator (MSI) calibration value.
* @param MSICalibrationValue: specifies the MSI calibration trimming value.
* This parameter must be a number between 0 and 0xFF.
* @retval None
*/
void RCC_AdjustMSICalibrationValue(uint8_t MSICalibrationValue)
{
  /* Check the parameters */
  assert_param(IS_RCC_MSI_CALIBRATION_VALUE(MSICalibrationValue));

  /* Byte write to ICSCR bits [31:24] (MSITRIM): only the MSI trim field
     is modified, the rest of ICSCR is untouched. */
  *(__IO uint8_t *) ICSCR_BYTE4_ADDRESS = MSICalibrationValue;
}
/**
* @brief Configures the Internal Multi Speed oscillator (MSI) clock range.
* @param RCC_MSIRange: specifies the MSI Clcok range.
* This parameter must be one of the following values:
* @arg RCC_MSIRange_64KHz: MSI clock is around 64 KHz
* @arg RCC_MSIRange_128KHz: MSI clock is around 128 KHz
* @arg RCC_MSIRange_256KHz: MSI clock is around 256 KHz
* @arg RCC_MSIRange_512KHz: MSI clock is around 512 KHz
* @arg RCC_MSIRange_1MHz: MSI clock is around 1 MHz
* @arg RCC_MSIRange_2MHz: MSI clock is around 2 MHz
* @arg RCC_MSIRange_4MHz: MSI clock is around 4 MHz
* @retval None
*/
/* Selects the MSI frequency range by rewriting MSIRANGE[2:0] in ICSCR. */
void RCC_MSIRangeConfig(uint32_t RCC_MSIRange)
{
  /* Check the parameters */
  assert_param(IS_RCC_MSI_CLOCK_RANGE(RCC_MSIRange));

  /* Single read-modify-write: clear MSIRANGE[2:0] and set the new range. */
  RCC->ICSCR = (RCC->ICSCR & ~RCC_ICSCR_MSIRANGE) | (uint32_t)RCC_MSIRange;
}
/**
* @brief Enables or disables the Internal Multi Speed oscillator (MSI).
* @note MSI can not be stopped if it is used directly as system clock.
* @param NewState: new state of the MSI.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_MSICmd(FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_FUNCTIONAL_STATE(NewState));

  /* Set/clear the MSION bit through its Cortex-M bit-band alias, so the
     write is a single atomic bit update (no read-modify-write needed). */
  *(__IO uint32_t *) CR_MSION_BB = (uint32_t)NewState;
}
/**
* @brief Enables or disables the Internal High Speed oscillator (HSI).
* @note HSI can not be stopped if it is used directly or through the PLL as system clock.
* @param NewState: new state of the HSI.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_HSICmd(FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_FUNCTIONAL_STATE(NewState));

  /* Set/clear the HSION bit through its bit-band alias (atomic bit write). */
  *(__IO uint32_t *) CR_HSION_BB = (uint32_t)NewState;
}
/**
* @brief Configures the PLL clock source and multiplication factor.
* @note This function must be used only when the PLL is disabled.
* @param RCC_PLLSource: specifies the PLL entry clock source.
* This parameter can be one of the following values:
* @arg RCC_PLLSource_HSI: HSI oscillator clock selected as PLL clock entry
* @arg RCC_PLLSource_HSE: HSE oscillator clock selected as PLL clock entry
* @param RCC_PLLMul: specifies the PLL multiplication factor.
* This parameter can be:
* @arg RCC_PLLMul_3: PLL Clock entry multiplied by 3
* @arg RCC_PLLMul_4: PLL Clock entry multiplied by 4
* @arg RCC_PLLMul_6: PLL Clock entry multiplied by 6
* @arg RCC_PLLMul_8: PLL Clock entry multiplied by 8
* @arg RCC_PLLMul_12: PLL Clock entry multiplied by 12
* @arg RCC_PLLMul_16: PLL Clock entry multiplied by 16
* @arg RCC_PLLMul_24: PLL Clock entry multiplied by 24
* @arg RCC_PLLMul_32: PLL Clock entry multiplied by 32
* @arg RCC_PLLMul_48: PLL Clock entry multiplied by 48
* @param RCC_PLLDiv: specifies the PLL division factor.
* This parameter can be:
* @arg RCC_PLLDiv_2: PLL Clock output divided by 2
* @arg RCC_PLLDiv_3: PLL Clock output divided by 3
* @arg RCC_PLLDiv_4: PLL Clock output divided by 4
* @retval None
*/
void RCC_PLLConfig(uint8_t RCC_PLLSource, uint8_t RCC_PLLMul, uint8_t RCC_PLLDiv)
{
  /* Check the parameters */
  assert_param(IS_RCC_PLL_SOURCE(RCC_PLLSource));
  assert_param(IS_RCC_PLL_MUL(RCC_PLLMul));
  assert_param(IS_RCC_PLL_DIV(RCC_PLLDiv));

  /* Single byte write to CFGR bits [23:16]: the source, multiplier and
     divider fields are OR-combined and stored together. */
  *(__IO uint8_t *) CFGR_BYTE3_ADDRESS = (uint8_t)(RCC_PLLSource | ((uint8_t)(RCC_PLLMul | (uint8_t)(RCC_PLLDiv))));
}
/**
* @brief Enables or disables the PLL.
* @note The PLL can not be disabled if it is used as system clock.
* @param NewState: new state of the PLL.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_PLLCmd(FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_FUNCTIONAL_STATE(NewState));

  /* Set/clear the PLLON bit through its bit-band alias (atomic bit write). */
  *(__IO uint32_t *) CR_PLLON_BB = (uint32_t)NewState;
}
/**
* @brief Configures the system clock (SYSCLK).
* @param RCC_SYSCLKSource: specifies the clock source used as system clock.
* This parameter can be one of the following values:
* @arg RCC_SYSCLKSource_MSI: MSI selected as system clock
* @arg RCC_SYSCLKSource_HSI: HSI selected as system clock
* @arg RCC_SYSCLKSource_HSE: HSE selected as system clock
* @arg RCC_SYSCLKSource_PLLCLK: PLL selected as system clock
* @retval None
*/
/* Requests a new system clock source by rewriting the SW[1:0] field of CFGR.
   (Hardware switches only once the selected source is ready.) */
void RCC_SYSCLKConfig(uint32_t RCC_SYSCLKSource)
{
  /* Check the parameters */
  assert_param(IS_RCC_SYSCLK_SOURCE(RCC_SYSCLKSource));

  /* Single read-modify-write: clear SW[1:0] and insert the new source. */
  RCC->CFGR = (RCC->CFGR & ~RCC_CFGR_SW) | RCC_SYSCLKSource;
}
/**
* @brief Returns the clock source used as system clock.
* @param None
* @retval The clock source used as system clock. The returned value can be one
* of the following values:
* - 0x00: MSI used as system clock
* - 0x04: HSI used as system clock
* - 0x08: HSE used as system clock
* - 0x0C: PLL used as system clock
*/
uint8_t RCC_GetSYSCLKSource(void)
{
  /* Return the SWS (system clock switch status) field of CFGR; values are
     documented in the function header above (0x00/0x04/0x08/0x0C). */
  return ((uint8_t)(RCC->CFGR & RCC_CFGR_SWS));
}
/**
* @brief Configures the AHB clock (HCLK).
* @param RCC_SYSCLK: defines the AHB clock divider. This clock is derived from
* the system clock (SYSCLK).
* This parameter can be one of the following values:
* @arg RCC_SYSCLK_Div1: AHB clock = SYSCLK
* @arg RCC_SYSCLK_Div2: AHB clock = SYSCLK/2
* @arg RCC_SYSCLK_Div4: AHB clock = SYSCLK/4
* @arg RCC_SYSCLK_Div8: AHB clock = SYSCLK/8
* @arg RCC_SYSCLK_Div16: AHB clock = SYSCLK/16
* @arg RCC_SYSCLK_Div64: AHB clock = SYSCLK/64
* @arg RCC_SYSCLK_Div128: AHB clock = SYSCLK/128
* @arg RCC_SYSCLK_Div256: AHB clock = SYSCLK/256
* @arg RCC_SYSCLK_Div512: AHB clock = SYSCLK/512
* @retval None
*/
/* Sets the AHB prescaler by rewriting HPRE[3:0] in CFGR. */
void RCC_HCLKConfig(uint32_t RCC_SYSCLK)
{
  /* Check the parameters */
  assert_param(IS_RCC_HCLK(RCC_SYSCLK));

  /* Single read-modify-write: clear HPRE[3:0] and insert the new divider. */
  RCC->CFGR = (RCC->CFGR & ~RCC_CFGR_HPRE) | RCC_SYSCLK;
}
/**
* @brief Configures the Low Speed APB clock (PCLK1).
* @param RCC_HCLK: defines the APB1 clock divider. This clock is derived from
* the AHB clock (HCLK).
* This parameter can be one of the following values:
* @arg RCC_HCLK_Div1: APB1 clock = HCLK
* @arg RCC_HCLK_Div2: APB1 clock = HCLK/2
* @arg RCC_HCLK_Div4: APB1 clock = HCLK/4
* @arg RCC_HCLK_Div8: APB1 clock = HCLK/8
* @arg RCC_HCLK_Div16: APB1 clock = HCLK/16
* @retval None
*/
/* Sets the APB1 prescaler by rewriting PPRE1[2:0] in CFGR. */
void RCC_PCLK1Config(uint32_t RCC_HCLK)
{
  /* Check the parameters */
  assert_param(IS_RCC_PCLK(RCC_HCLK));

  /* Single read-modify-write: clear PPRE1[2:0] and insert the new divider. */
  RCC->CFGR = (RCC->CFGR & ~RCC_CFGR_PPRE1) | RCC_HCLK;
}
/**
* @brief Configures the High Speed APB clock (PCLK2).
* @param RCC_HCLK: defines the APB2 clock divider. This clock is derived from
* the AHB clock (HCLK).
* This parameter can be one of the following values:
* @arg RCC_HCLK_Div1: APB2 clock = HCLK
* @arg RCC_HCLK_Div2: APB2 clock = HCLK/2
* @arg RCC_HCLK_Div4: APB2 clock = HCLK/4
* @arg RCC_HCLK_Div8: APB2 clock = HCLK/8
* @arg RCC_HCLK_Div16: APB2 clock = HCLK/16
* @retval None
*/
/* Sets the APB2 prescaler by rewriting PPRE2[2:0] in CFGR. */
void RCC_PCLK2Config(uint32_t RCC_HCLK)
{
  /* Check the parameters */
  assert_param(IS_RCC_PCLK(RCC_HCLK));

  /* Single read-modify-write: clear PPRE2[2:0] and insert the new divider.
     The shared RCC_HCLK_Divx encoding targets PPRE1, so shift left by 3
     to land in the PPRE2 field. */
  RCC->CFGR = (RCC->CFGR & ~RCC_CFGR_PPRE2) | (RCC_HCLK << 3);
}
/**
* @brief Enables or disables the specified RCC interrupts.
* @param RCC_IT: specifies the RCC interrupt sources to be enabled or disabled.
* This parameter can be any combination of the following values:
* @arg RCC_IT_LSIRDY: LSI ready interrupt
* @arg RCC_IT_LSERDY: LSE ready interrupt
* @arg RCC_IT_HSIRDY: HSI ready interrupt
* @arg RCC_IT_HSERDY: HSE ready interrupt
* @arg RCC_IT_PLLRDY: PLL ready interrupt
* @arg RCC_IT_MSIRDY: MSI ready interrupt
* @param NewState: new state of the specified RCC interrupts.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_ITConfig(uint8_t RCC_IT, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_IT(RCC_IT));
  assert_param(IS_FUNCTIONAL_STATE(NewState));

  /* CIR_BYTE2_ADDRESS aliases CIR bits [15:8] (the interrupt enable bits),
     so a byte-wide OR/AND only touches enables, never the flag bits. */
  if (NewState != DISABLE)
  {
    /* Perform Byte access to RCC_CIR[12:8] bits to enable the selected interrupts */
    *(__IO uint8_t *) CIR_BYTE2_ADDRESS |= RCC_IT;
  }
  else
  {
    /* Perform Byte access to RCC_CIR[12:8] bits to disable the selected interrupts */
    *(__IO uint8_t *) CIR_BYTE2_ADDRESS &= (uint8_t)~RCC_IT;
  }
}
/**
* @brief Configures the External Low Speed oscillator (LSE).
* @param RCC_LSE: specifies the new state of the LSE.
* This parameter can be one of the following values:
* @arg RCC_LSE_OFF: LSE oscillator OFF
* @arg RCC_LSE_ON: LSE oscillator ON
* @arg RCC_LSE_Bypass: LSE oscillator bypassed with external clock
* @retval None
*/
void RCC_LSEConfig(uint8_t RCC_LSE)
{
  /* Check the parameters */
  assert_param(IS_RCC_LSE(RCC_LSE));

  /* Both writes go through a byte alias of CSR bits [15:8], so only the
     LSE control bits are affected. */
  /* Reset LSEON and LSEBYP bits before configuring the LSE ------------------*/
  *(__IO uint8_t *) CSR_BYTE2_ADDRESS = RCC_LSE_OFF;

  /* Set the new LSE configuration -------------------------------------------*/
  *(__IO uint8_t *) CSR_BYTE2_ADDRESS = RCC_LSE;
}
/**
* @brief Enables or disables the Internal Low Speed oscillator (LSI).
* @note LSI can not be disabled if the IWDG is running.
* @param NewState: new state of the LSI.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_LSICmd(FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  /* Write through CSR_LSION_BB (a bit-band style alias defined elsewhere in
     this file) so only the LSION bit is modified, with no read-modify-write. */
  *(__IO uint32_t *) CSR_LSION_BB = (uint32_t)NewState;
}
/**
* @brief Configures the RTC and LCD clock (RTCCLK / LCDCLK).
* @note
* - Once the RTC clock is selected it can't be changed unless the RTC is
* reset using RCC_RTCResetCmd function.
* - This RTC clock (RTCCLK) is used to clock the LCD (LCDCLK).
* @param RCC_RTCCLKSource: specifies the RTC clock source.
* This parameter can be one of the following values:
* @arg RCC_RTCCLKSource_LSE: LSE selected as RTC clock
* @arg RCC_RTCCLKSource_LSI: LSI selected as RTC clock
* @arg RCC_RTCCLKSource_HSE_Div2: HSE divided by 2 selected as RTC clock
* @arg RCC_RTCCLKSource_HSE_Div4: HSE divided by 4 selected as RTC clock
* @arg RCC_RTCCLKSource_HSE_Div8: HSE divided by 8 selected as RTC clock
* @arg RCC_RTCCLKSource_HSE_Div16: HSE divided by 16 selected as RTC clock
* @retval None
*/
void RCC_RTCCLKConfig(uint32_t RCC_RTCCLKSource)
{
  uint32_t tmpreg = 0;
  /* Check the parameters */
  assert_param(IS_RCC_RTCCLK_SOURCE(RCC_RTCCLKSource));
  if ((RCC_RTCCLKSource & RCC_CSR_RTCSEL_HSE) == RCC_CSR_RTCSEL_HSE)
  {
    /* If HSE is selected as RTC clock source, configure HSE division factor for RTC clock */
    tmpreg = RCC->CR;
    /* Clear RTCPRE[1:0] bits */
    tmpreg &= ~RCC_CR_RTCPRE;
    /* Configure HSE division factor for RTC clock (Div2/4/8/16 encoded in the
       RTCPRE field of the source parameter) */
    tmpreg |= (RCC_RTCCLKSource & RCC_CR_RTCPRE);
    /* Store the new value */
    RCC->CR = tmpreg;
  }
  /* Clear any previous source selection, then select the new RTC clock source.
     Per the header note, RTCSEL can only be changed again after an RTC reset. */
  RCC->CSR &= ~RCC_CSR_RTCSEL;
  /* Select the RTC clock source */
  RCC->CSR |= (RCC_RTCCLKSource & RCC_CSR_RTCSEL);
}
/**
* @brief Enables or disables the RTC clock.
* @note This function must be used only after the RTC clock was selected using the
* RCC_RTCCLKConfig function.
* @param NewState: new state of the RTC clock.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_RTCCLKCmd(FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  /* Single-bit write via the RTCEN bit-band style alias; requires the RTC
     clock source to have been selected first (see RCC_RTCCLKConfig). */
  *(__IO uint32_t *) CSR_RTCEN_BB = (uint32_t)NewState;
}
/**
* @brief Forces or releases the RTC peripheral reset.
* @param NewState: new state of the RTC reset.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_RTCResetCmd(FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  /* ENABLE forces the RTC domain reset, DISABLE releases it. */
  *(__IO uint32_t *) CSR_RTCRST_BB = (uint32_t)NewState;
}
/**
* @brief Returns the frequencies of different on chip clocks.
* @param RCC_Clocks: pointer to a RCC_ClocksTypeDef structure which will hold
* the clocks frequencies.
* @retval None
*/
void RCC_GetClocksFreq(RCC_ClocksTypeDef* RCC_Clocks)
{
  uint32_t tmp = 0, pllmul = 0, plldiv = 0, pllsource = 0, presc = 0, msirange = 0;

  /* Get SYSCLK source -------------------------------------------------------*/
  tmp = RCC->CFGR & RCC_CFGR_SWS;
  switch (tmp)
  {
    case 0x00:  /* MSI used as system clock */
      /* MSIRANGE field is extracted from ICSCR (bits shifted down by 13);
         the frequency formula uses MSITable, defined elsewhere in this file. */
      msirange = (RCC->ICSCR & RCC_ICSCR_MSIRANGE ) >> 13;
      RCC_Clocks->SYSCLK_Frequency = (((1 << msirange) * 64000) - (MSITable[msirange] * 24000));
      break;
    case 0x04:  /* HSI used as system clock */
      RCC_Clocks->SYSCLK_Frequency = HSI_VALUE;
      break;
    case 0x08:  /* HSE used as system clock */
      RCC_Clocks->SYSCLK_Frequency = HSE_VALUE;
      break;
    case 0x0C:  /* PLL used as system clock */
      /* Get PLL clock source and multiplication factor ----------------------*/
      pllmul = RCC->CFGR & RCC_CFGR_PLLMUL;
      plldiv = RCC->CFGR & RCC_CFGR_PLLDIV;
      /* PLLMUL is a table lookup; PLLDIV encodes (divider - 1). */
      pllmul = PLLMulTable[(pllmul >> 18)];
      plldiv = (plldiv >> 22) + 1;
      pllsource = RCC->CFGR & RCC_CFGR_PLLSRC;
      if (pllsource == 0x00)
      {
        /* HSI oscillator clock selected as PLL clock entry */
        RCC_Clocks->SYSCLK_Frequency = (((HSI_VALUE) * pllmul) / plldiv);
      }
      else
      {
        /* HSE selected as PLL clock entry */
        RCC_Clocks->SYSCLK_Frequency = (((HSE_VALUE) * pllmul) / plldiv);
      }
      break;
    default:
      /* Unexpected SWS encoding: fall back to HSI. */
      RCC_Clocks->SYSCLK_Frequency = HSI_VALUE;
      break;
  }
  /* Compute HCLK, PCLK1, PCLK2 and ADCCLK clocks frequencies ----------------*/
  /* APBAHBPrescTable holds shift amounts, so each division below is a `>>`. */
  /* Get HCLK prescaler */
  tmp = RCC->CFGR & RCC_CFGR_HPRE;
  tmp = tmp >> 4;
  presc = APBAHBPrescTable[tmp];
  /* HCLK clock frequency */
  RCC_Clocks->HCLK_Frequency = RCC_Clocks->SYSCLK_Frequency >> presc;
  /* Get PCLK1 prescaler */
  tmp = RCC->CFGR & RCC_CFGR_PPRE1;
  tmp = tmp >> 8;
  presc = APBAHBPrescTable[tmp];
  /* PCLK1 clock frequency */
  RCC_Clocks->PCLK1_Frequency = RCC_Clocks->HCLK_Frequency >> presc;
  /* Get PCLK2 prescaler */
  tmp = RCC->CFGR & RCC_CFGR_PPRE2;
  tmp = tmp >> 11;
  presc = APBAHBPrescTable[tmp];
  /* PCLK2 clock frequency */
  RCC_Clocks->PCLK2_Frequency = RCC_Clocks->HCLK_Frequency >> presc;
}
/**
* @brief Enables or disables the AHB peripheral clock.
* @param RCC_AHBPeriph: specifies the AHB peripheral to gates its clock.
* This parameter can be any combination of the following values:
* @arg RCC_AHBPeriph_GPIOA
* @arg RCC_AHBPeriph_GPIOB
* @arg RCC_AHBPeriph_GPIOC
* @arg RCC_AHBPeriph_GPIOD
* @arg RCC_AHBPeriph_GPIOE
* @arg RCC_AHBPeriph_GPIOH
* @arg RCC_AHBPeriph_CRC
* @arg RCC_AHBPeriph_FLITF (has effect only when the Flash memory is in power down mode)
* @arg RCC_AHBPeriph_DMA1
* @param NewState: new state of the specified peripheral clock.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_AHBPeriphClockCmd(uint32_t RCC_AHBPeriph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_AHB_PERIPH(RCC_AHBPeriph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* RCC_AHBPeriph may be an OR of several RCC_AHBPeriph_* masks;
       set bits gate the corresponding peripheral clocks on. */
    RCC->AHBENR |= RCC_AHBPeriph;
  }
  else
  {
    RCC->AHBENR &= ~RCC_AHBPeriph;
  }
}
/**
* @brief Enables or disables the High Speed APB (APB2) peripheral clock.
* @param RCC_APB2Periph: specifies the APB2 peripheral to gates its clock.
* This parameter can be any combination of the following values:
* @arg RCC_APB2Periph_SYSCFG
* @arg RCC_APB2Periph_TIM9
* @arg RCC_APB2Periph_TIM10
* @arg RCC_APB2Periph_TIM11
* @arg RCC_APB2Periph_ADC1
* @arg RCC_APB2Periph_SPI1
* @arg RCC_APB2Periph_USART1
* @param NewState: new state of the specified peripheral clock.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_APB2PeriphClockCmd(uint32_t RCC_APB2Periph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_APB2_PERIPH(RCC_APB2Periph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* Set the selected bits in APB2ENR to enable the peripheral clocks. */
    RCC->APB2ENR |= RCC_APB2Periph;
  }
  else
  {
    RCC->APB2ENR &= ~RCC_APB2Periph;
  }
}
/**
* @brief Enables or disables the Low Speed APB (APB1) peripheral clock.
* @param RCC_APB1Periph: specifies the APB1 peripheral to gates its clock.
* This parameter can be any combination of the following values:
* @arg RCC_APB1Periph_TIM2
* @arg RCC_APB1Periph_TIM3
* @arg RCC_APB1Periph_TIM4
* @arg RCC_APB1Periph_TIM6
* @arg RCC_APB1Periph_TIM7
* @arg RCC_APB1Periph_LCD
* @arg RCC_APB1Periph_WWDG
* @arg RCC_APB1Periph_SPI2
* @arg RCC_APB1Periph_USART2
* @arg RCC_APB1Periph_USART3
* @arg RCC_APB1Periph_I2C1
* @arg RCC_APB1Periph_I2C2
* @arg RCC_APB1Periph_USB
* @arg RCC_APB1Periph_PWR
* @arg RCC_APB1Periph_DAC
* @arg RCC_APB1Periph_COMP
* @param NewState: new state of the specified peripheral clock.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_APB1PeriphClockCmd(uint32_t RCC_APB1Periph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_APB1_PERIPH(RCC_APB1Periph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* Set the selected bits in APB1ENR to enable the peripheral clocks. */
    RCC->APB1ENR |= RCC_APB1Periph;
  }
  else
  {
    RCC->APB1ENR &= ~RCC_APB1Periph;
  }
}
/**
* @brief Forces or releases AHB peripheral reset.
* @param RCC_AHBPeriph: specifies the AHB peripheral to reset.
* This parameter can be any combination of the following values:
* @arg RCC_AHBPeriph_GPIOA
* @arg RCC_AHBPeriph_GPIOB
* @arg RCC_AHBPeriph_GPIOC
* @arg RCC_AHBPeriph_GPIOD
* @arg RCC_AHBPeriph_GPIOE
* @arg RCC_AHBPeriph_GPIOH
* @arg RCC_AHBPeriph_CRC
* @arg RCC_AHBPeriph_FLITF (has effect only when the Flash memory is in power down mode)
* @arg RCC_AHBPeriph_DMA1
* @param NewState: new state of the specified peripheral reset.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_AHBPeriphResetCmd(uint32_t RCC_AHBPeriph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_AHB_PERIPH(RCC_AHBPeriph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* Assert (hold) the reset line of the selected AHB peripherals. */
    RCC->AHBRSTR |= RCC_AHBPeriph;
  }
  else
  {
    /* Release the reset line. */
    RCC->AHBRSTR &= ~RCC_AHBPeriph;
  }
}
/**
* @brief Forces or releases High Speed APB (APB2) peripheral reset.
* @param RCC_APB2Periph: specifies the APB2 peripheral to reset.
* This parameter can be any combination of the following values:
* @arg RCC_APB2Periph_SYSCFG
* @arg RCC_APB2Periph_TIM9
* @arg RCC_APB2Periph_TIM10
* @arg RCC_APB2Periph_TIM11
* @arg RCC_APB2Periph_ADC1
* @arg RCC_APB2Periph_SPI1
* @arg RCC_APB2Periph_USART1
* @param NewState: new state of the specified peripheral reset.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_APB2PeriphResetCmd(uint32_t RCC_APB2Periph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_APB2_PERIPH(RCC_APB2Periph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* Assert (hold) the reset line of the selected APB2 peripherals. */
    RCC->APB2RSTR |= RCC_APB2Periph;
  }
  else
  {
    /* Release the reset line. */
    RCC->APB2RSTR &= ~RCC_APB2Periph;
  }
}
/**
* @brief Forces or releases Low Speed APB (APB1) peripheral reset.
* @param RCC_APB1Periph: specifies the APB1 peripheral to reset.
* This parameter can be any combination of the following values:
* @arg RCC_APB1Periph_TIM2
* @arg RCC_APB1Periph_TIM3
* @arg RCC_APB1Periph_TIM4
* @arg RCC_APB1Periph_TIM6
* @arg RCC_APB1Periph_TIM7
* @arg RCC_APB1Periph_LCD
* @arg RCC_APB1Periph_WWDG
* @arg RCC_APB1Periph_SPI2
* @arg RCC_APB1Periph_USART2
* @arg RCC_APB1Periph_USART3
* @arg RCC_APB1Periph_I2C1
* @arg RCC_APB1Periph_I2C2
* @arg RCC_APB1Periph_USB
* @arg RCC_APB1Periph_PWR
* @arg RCC_APB1Periph_DAC
* @arg RCC_APB1Periph_COMP
* @param NewState: new state of the specified peripheral clock.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_APB1PeriphResetCmd(uint32_t RCC_APB1Periph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_APB1_PERIPH(RCC_APB1Periph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* Assert (hold) the reset line of the selected APB1 peripherals. */
    RCC->APB1RSTR |= RCC_APB1Periph;
  }
  else
  {
    /* Release the reset line. */
    RCC->APB1RSTR &= ~RCC_APB1Periph;
  }
}
/**
* @brief Enables or disables the AHB peripheral clock during Low Power (SLEEP) mode.
* @param RCC_AHBPeriph: specifies the AHB peripheral to gates its clock.
* This parameter can be any combination of the following values:
* @arg RCC_AHBPeriph_GPIOA
* @arg RCC_AHBPeriph_GPIOB
* @arg RCC_AHBPeriph_GPIOC
* @arg RCC_AHBPeriph_GPIOD
* @arg RCC_AHBPeriph_GPIOE
* @arg RCC_AHBPeriph_GPIOH
* @arg RCC_AHBPeriph_CRC
* @arg RCC_AHBPeriph_FLITF (has effect only when the Flash memory is in power down mode)
* @arg RCC_AHBPeriph_SRAM
* @arg RCC_AHBPeriph_DMA1
* @param NewState: new state of the specified peripheral clock.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_AHBPeriphClockLPModeCmd(uint32_t RCC_AHBPeriph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_AHB_LPMODE_PERIPH(RCC_AHBPeriph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* Keep the selected AHB peripheral clocks running during SLEEP mode. */
    RCC->AHBLPENR |= RCC_AHBPeriph;
  }
  else
  {
    RCC->AHBLPENR &= ~RCC_AHBPeriph;
  }
}
/**
* @brief Enables or disables the APB2 peripheral clock during Low Power (SLEEP) mode.
* @param RCC_APB2Periph: specifies the APB2 peripheral to gates its clock.
* This parameter can be any combination of the following values:
* @arg RCC_APB2Periph_SYSCFG
* @arg RCC_APB2Periph_TIM9
* @arg RCC_APB2Periph_TIM10
* @arg RCC_APB2Periph_TIM11
* @arg RCC_APB2Periph_ADC1
* @arg RCC_APB2Periph_SPI1
* @arg RCC_APB2Periph_USART1
* @param NewState: new state of the specified peripheral clock.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_APB2PeriphClockLPModeCmd(uint32_t RCC_APB2Periph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_APB2_PERIPH(RCC_APB2Periph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* Keep the selected APB2 peripheral clocks running during SLEEP mode. */
    RCC->APB2LPENR |= RCC_APB2Periph;
  }
  else
  {
    RCC->APB2LPENR &= ~RCC_APB2Periph;
  }
}
/**
* @brief Enables or disables the APB1 peripheral clock during Low Power (SLEEP) mode.
* @param RCC_APB1Periph: specifies the APB1 peripheral to gates its clock.
* This parameter can be any combination of the following values:
* @arg RCC_APB1Periph_TIM2
* @arg RCC_APB1Periph_TIM3
* @arg RCC_APB1Periph_TIM4
* @arg RCC_APB1Periph_TIM6
* @arg RCC_APB1Periph_TIM7
* @arg RCC_APB1Periph_LCD
* @arg RCC_APB1Periph_WWDG
* @arg RCC_APB1Periph_SPI2
* @arg RCC_APB1Periph_USART2
* @arg RCC_APB1Periph_USART3
* @arg RCC_APB1Periph_I2C1
* @arg RCC_APB1Periph_I2C2
* @arg RCC_APB1Periph_USB
* @arg RCC_APB1Periph_PWR
* @arg RCC_APB1Periph_DAC
* @arg RCC_APB1Periph_COMP
* @param NewState: new state of the specified peripheral clock.
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_APB1PeriphClockLPModeCmd(uint32_t RCC_APB1Periph, FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_RCC_APB1_PERIPH(RCC_APB1Periph));
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  if (NewState != DISABLE)
  {
    /* Keep the selected APB1 peripheral clocks running during SLEEP mode. */
    RCC->APB1LPENR |= RCC_APB1Periph;
  }
  else
  {
    RCC->APB1LPENR &= ~RCC_APB1Periph;
  }
}
/**
* @brief Enables or disables the Clock Security System.
* @param NewState: new state of the Clock Security System..
* This parameter can be: ENABLE or DISABLE.
* @retval None
*/
void RCC_ClockSecuritySystemCmd(FunctionalState NewState)
{
  /* Check the parameters */
  assert_param(IS_FUNCTIONAL_STATE(NewState));
  /* Single-bit write via the CSSON alias address (defined elsewhere in this
     file); enables/disables the Clock Security System in one store. */
  *(__IO uint32_t *) CR_CSSON_BB = (uint32_t)NewState;
}
/**
* @brief Selects the clock source to output on MCO pin.
* @param RCC_MCOSource: specifies the clock source to output.
* This parameter can be one of the following values:
* @arg RCC_MCOSource_NoClock: No clock selected
* @arg RCC_MCOSource_SYSCLK: System clock selected
* @arg RCC_MCOSource_HSI: HSI oscillator clock selected
* @arg RCC_MCOSource_MSI: MSI oscillator clock selected
* @arg RCC_MCOSource_HSE: HSE oscillator clock selected
* @arg RCC_MCOSource_PLLCLK: PLL clock selected
* @arg RCC_MCOSource_LSI: LSI clock selected
* @arg RCC_MCOSource_LSE: LSE clock selected
* @param RCC_MCODiv: specifies the MCO prescaler.
* This parameter can be one of the following values:
* @arg RCC_MCODiv_1: no division applied to MCO clock
* @arg RCC_MCODiv_2: division by 2 applied to MCO clock
* @arg RCC_MCODiv_4: division by 4 applied to MCO clock
* @arg RCC_MCODiv_8: division by 8 applied to MCO clock
* @arg RCC_MCODiv_16: division by 16 applied to MCO clock
* @retval None
*/
void RCC_MCOConfig(uint8_t RCC_MCOSource, uint8_t RCC_MCODiv)
{
  /* Check the parameters */
  assert_param(IS_RCC_MCO_SOURCE(RCC_MCOSource));
  assert_param(IS_RCC_MCO_DIV(RCC_MCODiv));
  /* Select MCO clock source and prescaler: source and divider are packed into
     a single byte written to the MCO byte of CFGR. */
  *(__IO uint8_t *) CFGR_BYTE4_ADDRESS = RCC_MCOSource | RCC_MCODiv;
}
/**
* @brief Checks whether the specified RCC flag is set or not.
* @param RCC_FLAG: specifies the flag to check.
* This parameter can be one of the following values:
* @arg RCC_FLAG_HSIRDY: HSI oscillator clock ready
* @arg RCC_FLAG_MSIRDY: MSI oscillator clock ready
* @arg RCC_FLAG_HSERDY: HSE oscillator clock ready
* @arg RCC_FLAG_PLLRDY: PLL clock ready
* @arg RCC_FLAG_LSERDY: LSE oscillator clock ready
* @arg RCC_FLAG_LSIRDY: LSI oscillator clock ready
* @arg RCC_FLAG_OBLRST: Option Byte Loader (OBL) reset
* @arg RCC_FLAG_PINRST: Pin reset
* @arg RCC_FLAG_PORRST: POR/PDR reset
* @arg RCC_FLAG_SFTRST: Software reset
* @arg RCC_FLAG_IWDGRST: Independent Watchdog reset
* @arg RCC_FLAG_WWDGRST: Window Watchdog reset
* @arg RCC_FLAG_LPWRRST: Low Power reset
* @retval The new state of RCC_FLAG (SET or RESET).
*/
/* Reads the register selected by the upper bits of RCC_FLAG and tests the
   single flag bit addressed by its lower bits.  Encoding: bits [7:5] select
   the register (1 -> CR, otherwise CSR), bits masked by FLAG_MASK give the
   bit position inside that register. */
FlagStatus RCC_GetFlagStatus(uint8_t RCC_FLAG)
{
  uint32_t statusreg;
  uint32_t pos;

  /* Check the parameters */
  assert_param(IS_RCC_FLAG(RCC_FLAG));

  /* Pick the source register from the flag encoding. */
  statusreg = (((uint32_t)RCC_FLAG >> 5) == 1) ? RCC->CR : RCC->CSR;

  /* Extract the bit position and test it. */
  pos = RCC_FLAG & FLAG_MASK;
  return ((statusreg & ((uint32_t)1 << pos)) != (uint32_t)RESET) ? SET : RESET;
}
/**
* @brief Clears the RCC reset flags.
* The reset flags are: RCC_FLAG_OBLRST, RCC_FLAG_PINRST, RCC_FLAG_PORRST,
* RCC_FLAG_SFTRST, RCC_FLAG_IWDGRST, RCC_FLAG_WWDGRST, RCC_FLAG_LPWRRST.
* @param None
* @retval None
*/
void RCC_ClearFlag(void)
{
  /* Set RMVF bit to clear the reset flags */
  /* A single write clears all reset-cause flags at once; they cannot be
     cleared individually. */
  RCC->CSR |= RCC_CSR_RMVF;
}
/**
* @brief Checks whether the specified RCC interrupt has occurred or not.
* @param RCC_IT: specifies the RCC interrupt source to check.
* This parameter can be one of the following values:
* @arg RCC_IT_LSIRDY: LSI ready interrupt
* @arg RCC_IT_LSERDY: LSE ready interrupt
* @arg RCC_IT_HSIRDY: HSI ready interrupt
* @arg RCC_IT_HSERDY: HSE ready interrupt
* @arg RCC_IT_PLLRDY: PLL ready interrupt
* @arg RCC_IT_MSIRDY: MSI ready interrupt
* @arg RCC_IT_CSS: Clock Security System interrupt
* @retval The new state of RCC_IT (SET or RESET).
*/
/* Tests whether the interrupt flag(s) selected by RCC_IT are pending in the
   RCC_CIR register. */
ITStatus RCC_GetITStatus(uint8_t RCC_IT)
{
  /* Check the parameters */
  assert_param(IS_RCC_GET_IT(RCC_IT));

  /* Check the status of the specified RCC interrupt and report it directly. */
  return ((RCC->CIR & RCC_IT) != (uint32_t)RESET) ? SET : RESET;
}
/**
* @brief Clears the RCC's interrupt pending bits.
* @param RCC_IT: specifies the interrupt pending bit to clear.
* This parameter can be any combination of the following values:
* @arg RCC_IT_LSIRDY: LSI ready interrupt
* @arg RCC_IT_LSERDY: LSE ready interrupt
* @arg RCC_IT_HSIRDY: HSI ready interrupt
* @arg RCC_IT_HSERDY: HSE ready interrupt
* @arg RCC_IT_PLLRDY: PLL ready interrupt
* @arg RCC_IT_MSIRDY: MSI ready interrupt
* @arg RCC_IT_CSS: Clock Security System interrupt
* @retval None
*/
void RCC_ClearITPendingBit(uint8_t RCC_IT)
{
  /* Check the parameters */
  assert_param(IS_RCC_CLEAR_IT(RCC_IT));
  /* Perform Byte access to RCC_CIR[23:16] bits to clear the selected interrupt
     pending bits (write-1-to-clear byte of CIR). */
  *(__IO uint8_t *) CIR_BYTE3_ADDRESS = RCC_IT;
}
/**
* @}
*/
/**
* @}
*/
/**
* @}
*/
/******************* (C) COPYRIGHT 2010 STMicroelectronics *****END OF FILE****/
|
ankurshukla1993/IOT-test | coeey/com/google/android/gms/internal/zzx.java | package com.google.android.gms.internal;
// Obfuscated interface from Google Play Services internals (ProGuard names).
// The original member semantics are not recoverable from this file alone.
public interface zzx {
    int zza();

    // NOTE(review): takes a zzaa argument and declares `throws zzaa`, so zzaa
    // is both a data type and a Throwable here — confirm against the
    // deobfuscated source before documenting further.
    void zza(zzaa com_google_android_gms_internal_zzaa) throws zzaa;

    int zzb();
}
|
jiadaizhao/LeetCode | 0201-0300/0271-Encode and Decode Strings/0271-Encode and Decode Strings.py | class Codec:
def encode(self, strs: [str]) -> str:
    """Encodes a list of strings to a single string.

    Each string is emitted as ``<length>@<payload>`` so that the payload
    may itself contain ``@`` without ambiguity.
    """
    parts = []
    for s in strs:
        parts.append('%d@%s' % (len(s), s))
    return ''.join(parts)
def decode(self, s: str) -> [str]:
    """Decodes a single string to a list of strings.

    Inverse of ``encode``: repeatedly reads a decimal length up to the next
    ``@`` marker, then slices out that many payload characters.
    """
    out = []
    pos = 0
    total = len(s)
    while pos < total:
        marker = s.find('@', pos)
        length = int(s[pos:marker])
        start = marker + 1
        out.append(s[start:start + length])
        pos = start + length
    return out
# Your Codec object will be instantiated and called as such:
# codec = Codec()
# codec.decode(codec.encode(strs))
|
shirohoo/spring-boot-examples | spring-webflux/src/main/java/io/github/shirohoo/reactive/app/MongoDBLoader.java | <reponame>shirohoo/spring-boot-examples
package io.github.shirohoo.reactive.app;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.stereotype.Component;
@Component
class MongoDBLoader {
    /**
     * Seeds MongoDB with sample {@code Item} documents at application startup.
     * <p>
     * Spring Boot runs the returned {@link CommandLineRunner} once the
     * application context is fully initialized; each {@code save} upserts one
     * sample item (name, price).
     */
    @Bean
    CommandLineRunner initialize(MongoOperations mongo) {
        return args -> {
            mongo.save(Item.of("Alf alarm clock", 19.99));
            mongo.save(Item.of("Smurf TV tray", 24.99));
            mongo.save(Item.of("Wireless charging station", 25.99));
            mongo.save(Item.of("iPhone lightning cable", 39.99));
            mongo.save(Item.of("Double dog leash", 3.07));
            mongo.save(Item.of("Pet interactive sound Toy", 2.29));
        };
    }
}
|
m-nakagawa/sample | jena-3.0.1/jena-core/src/main/java/org/apache/jena/reasoner/rulesys/RDFSRuleInfGraph.java | <reponame>m-nakagawa/sample
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.reasoner.rulesys;
import java.util.*;
import org.apache.jena.graph.* ;
import org.apache.jena.reasoner.* ;
/**
* Customization of the generic rule inference graph for RDFS inference.
* In fact all the rule processing is unchanged, the only extenstion is
* the validation support.
*/
/**
 * Customization of the generic rule inference graph for RDFS inference.
 * In fact all the rule processing is unchanged, the only extension is
 * the validation support.
 */
public class RDFSRuleInfGraph extends FBRuleInfGraph {

    /**
     * Constructor.
     * @param reasoner the reasoner which created this inf graph instance
     * @param rules the rules to process
     * @param schema the (optional) schema graph to be included
     */
    public RDFSRuleInfGraph(Reasoner reasoner, List<Rule> rules, Graph schema) {
        super(reasoner, rules, schema);
    }

    /**
     * Constructor.
     * @param reasoner the reasoner which created this inf graph instance
     * @param rules the rules to process
     * @param schema the (optional) schema graph to be included
     * @param data the data graph to be processed
     */
    public RDFSRuleInfGraph(Reasoner reasoner, List<Rule> rules, Graph schema, Graph data) {
        super(reasoner, rules, schema, data);
    }

    /**
     * Test the consistency of the bound data. For RDFS this checks that all
     * instances of datatype-ranged properties have correct data values.
     *
     * @return a ValidityReport structure
     */
    @Override
    public ValidityReport validate() {
        // The full configuration uses validation rules so check for these
        StandardValidityReport report = (StandardValidityReport)super.validate();
        // Also do a hardwired check to handle the simpler configurations
        performDatatypeRangeValidation(report);
        return report;
    }
}
|
chenqwwq/_leetcode | src/top.chenqwwq/leetcode/topic/greedy/_1578/SolutionTest.java | package top.chenqwwq.leetcode.topic.greedy._1578;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
* @author chen
* @date 2020-10-26
**/
/**
 * Unit tests for {@code Solution.minCost} — presumably LeetCode problem 1578
 * (per the package name {@code _1578}); confirm against the problem statement.
 */
class SolutionTest {
    // System under test.
    Solution solution = new Solution();

    @Test
    void minCost() {
        // Each case: expected minimal cost, input string, per-character deletion costs.
        Assertions.assertEquals(3, solution.minCost("abaac", new int[]{1, 2, 3, 4, 5}));
        Assertions.assertEquals(0, solution.minCost("abc", new int[]{3, 4, 5}));
        Assertions.assertEquals(2, solution.minCost("aabaa", new int[]{1, 3, 4, 5, 1}));
    }
}
hamongkhang/landingpage | resources/js/components/Pricing.js | const Pricing=()=>{
return(
<section id="pricing" className="pricing">
<div className="container">
<div className="section-title" >
<h2>Pricing</h2>
<p>Check our Pricing</p>
</div>
<div className="row" >
<div className="col-lg-3 col-md-6">
<div className="box" data-aos-delay={100}>
<h3>Free</h3>
<h4><sup>$</sup>0<span> / month</span></h4>
<ul>
<li>Aida dere</li>
<li>Nec feugiat nisl</li>
<li>Nulla at volutpat dola</li>
<li className="na">Pharetra massa</li>
<li className="na">Massa ultricies mi</li>
</ul>
<div className="btn-wrap">
<a href="#" className="btn-buy">Buy Now</a>
</div>
</div>
</div>
<div className="col-lg-3 col-md-6 mt-4 mt-md-0">
<div className="box featured" data-aos-delay={200}>
<h3>Business</h3>
<h4><sup>$</sup>19<span> / month</span></h4>
<ul>
<li>Aida dere</li>
<li>Nec feugiat nisl</li>
<li>Nulla at volutpat dola</li>
<li>Pharetra massa</li>
<li className="na">Massa ultricies mi</li>
</ul>
<div className="btn-wrap">
<a href="#" className="btn-buy">Buy Now</a>
</div>
</div>
</div>
<div className="col-lg-3 col-md-6 mt-4 mt-lg-0">
<div className="box" data-aos-delay={300}>
<h3>Developer</h3>
<h4><sup>$</sup>29<span> / month</span></h4>
<ul>
<li>Aida dere</li>
<li>Nec feugiat nisl</li>
<li>Nulla at volutpat dola</li>
<li>Pharetra massa</li>
<li>Massa ultricies mi</li>
</ul>
<div className="btn-wrap">
<a href="#" className="btn-buy">Buy Now</a>
</div>
</div>
</div>
<div className="col-lg-3 col-md-6 mt-4 mt-lg-0">
<div className="box" data-aos-delay={400}>
<span className="advanced">Advanced</span>
<h3>Ultimate</h3>
<h4><sup>$</sup>49<span> / month</span></h4>
<ul>
<li>Aida dere</li>
<li>Nec feugiat nisl</li>
<li>Nulla at volutpat dola</li>
<li>Pharetra massa</li>
<li>Massa ultricies mi</li>
</ul>
<div className="btn-wrap">
<a href="#" className="btn-buy">Buy Now</a>
</div>
</div>
</div>
</div>
</div>
</section>
);
}
export default Pricing; |
fillumina/lcs | lcs/src/test/java/com/fillumina/lcs/AlgorithmsPerformanceTest.java | package com.fillumina.lcs;
import com.fillumina.lcs.helper.LcsList;
import com.fillumina.lcs.testutil.RandomSequenceGenerator;
import com.fillumina.performance.consumer.assertion.PerformanceAssertion;
import com.fillumina.performance.producer.TestContainer;
import com.fillumina.performance.template.AutoProgressionPerformanceTemplate;
import com.fillumina.performance.template.ProgressionConfigurator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
/**
* Confront the performances of different algorithms. Note that performances
* can vary for different values of {@link #TOTAL} and {@link #LCS}.
*
* @author <NAME>
*/
public class AlgorithmsPerformanceTest extends AutoProgressionPerformanceTemplate {

    // TOTAL: length of each random sequence; LCS: length of the common subsequence.
    private static final int TOTAL = 30;
    private static final int LCS = 20;
    // Seed from nanoTime: each run uses a different random sequence pair.
    private static final long SEED = System.nanoTime();

    private final RandomSequenceGenerator generator;
    // Expected LCS, used to verify that every algorithm computes the same answer.
    private final List<Integer> lcsList;

    public static void main(String[] args) {
        System.out.println("performance evaluation, please wait...");
        new AlgorithmsPerformanceTest().executeWithIntermediateOutput();
    }

    public AlgorithmsPerformanceTest() {
        super();
        this.generator = new RandomSequenceGenerator(TOTAL, LCS, SEED);
        this.lcsList = generator.getLcs();
    }

    /**
     * Wraps one LCS implementation as a Runnable that both exercises it and
     * asserts correctness against the known expected subsequence.
     */
    private class LcsRunnable implements Runnable {
        private final LcsList lcsAlgorithm;

        public LcsRunnable(LcsList lcsAlgorithm) {
            this.lcsAlgorithm = lcsAlgorithm;
        }

        @Override
        public void run() {
            assertEquals(lcsAlgorithm.getClass().getSimpleName(),
                    lcsList, lcsAlgorithm.lcs(
                            generator.getArrayA(), generator.getArrayB()));
        }
    }

    @Override
    public void init(ProgressionConfigurator config) {
        // Benchmark harness parameters: base iteration count, hard timeout, and
        // the maximum accepted standard deviation before results stabilize.
        config.setBaseIterations(100);
        config.setTimeout(10, TimeUnit.MINUTES);
        config.setMaxStandardDeviation(6);
    }

    @Override
    public void addTests(TestContainer tests) {
        // Competing implementations under the same input pair.
        tests.addTest("Baseline",
                new LcsRunnable(new BaselineLinearSpaceMyersLcs()));
        tests.addTest("MyersLcs",
                new LcsRunnable(new LcsLengthAdaptor(MyersLcs.INSTANCE)));
        tests.addTest("LinearSpaceLcs",
                new LcsRunnable(new LcsLengthAdaptor(LinearSpaceMyersLcs.INSTANCE)));
        tests.addTest("HirschbergLcs",
                new LcsRunnable(new LcsLengthAdaptor(HirschbergLinearSpaceLcs.INSTANCE)));
        tests.addTest("WagnerFischerLcs",
                new LcsRunnable(new LcsLengthAdaptor(WagnerFischerLcs.INSTANCE)));
    }

    @Override
    public void addAssertions(PerformanceAssertion assertion) {
        // No performance-ratio assertions: this test only reports timings.
    }
}
|
woodshop/complex-chainer | chainer/functions/concat.py | <filename>chainer/functions/concat.py<gh_stars>1-10
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
_args = 'const float* x, float* y, int cdimx, int cdimy, int rdim, int coffset'
_preamble = '''
#define COPY(statement) \
int l = i / (rdim * cdimx); \
int c = i / rdim % cdimx + coffset; \
int r = i % rdim; \
int idx = r + rdim * (c + cdimy * l); \
statement;
'''
class Concat(function.Function):

    """Concatenate multiple tensors towards specified axis."""

    # concat along the channel dimension by default
    def __init__(self, axis=1):
        # axis: the concatenation axis (1 = channel axis for NCHW inputs).
        self.axis = axis

    def check_type_forward(self, in_types):
        # At least one input, and the concat axis must exist in the first input.
        type_check.expect(in_types.size() > 0)
        type_check.expect(in_types[0].ndim >
                          type_check.Variable(self.axis, 'axis'))

        ndim = in_types[0].ndim.eval()
        for i in range(1, in_types.size().eval()):
            # Every input must share dtype and rank with the first one.
            type_check.expect(
                in_types[0].dtype == in_types[i].dtype,
                in_types[0].ndim == in_types[i].ndim,
            )
            for d in range(0, ndim):
                # Shapes must agree on every axis except the concat axis.
                if d == self.axis:
                    continue
                type_check.expect(in_types[0].shape[d] == in_types[i].shape[d])

    def forward_cpu(self, xs):
        # CPU path: delegate directly to NumPy.
        return numpy.concatenate(xs, axis=self.axis),

    def forward_gpu(self, xs):
        # TODO(beam2d): Unify the process into a single kernel.
        # Output shape equals the inputs' shape except on the concat axis,
        # where the extents are summed.
        shape = list(xs[0].shape)
        for x in xs[1:]:
            shape[self.axis] += x.shape[self.axis]
        shape = tuple(shape)
        self.shape = shape

        y = cuda.empty(shape, dtype=xs[0].dtype)
        # cdimy: output extent along the concat axis;
        # rdim: number of elements in the trailing (post-axis) dimensions.
        self.cdimy = y.shape[self.axis]
        self.rdim = numpy.prod(shape[self.axis + 1:], dtype=int)

        coffset = 0
        kernel = cuda.elementwise(
            _args, 'COPY(y[idx] = x[i])', 'concat_fwd', preamble=_preamble)
        for x in xs:
            # Copy each input into its slice of y, advancing the axis offset.
            cdimx = x.shape[self.axis]
            kernel(x, y, cdimx, self.cdimy, self.rdim, coffset)
            coffset += cdimx

        return y,

    def backward_cpu(self, xs, gy):
        # Split the upstream gradient at the cumulative axis boundaries of xs.
        sizes = numpy.array([x.shape[self.axis] for x in xs[:-1]]).cumsum()
        return numpy.split(gy[0], sizes, axis=self.axis)

    def backward_gpu(self, xs, gy):
        # Inverse of forward_gpu: scatter slices of gy into per-input gradients
        # using the same index mapping with the copy direction reversed.
        gxs = tuple(cuda.empty_like(x) for x in xs)

        coffset = 0
        kernel = cuda.elementwise(
            _args, 'COPY(x[i] = y[idx])', 'concat_bwd', preamble=_preamble)
        for gx in gxs:
            cdimx = gx.shape[self.axis]
            kernel(gx, gy[0], cdimx, self.cdimy, self.rdim, coffset)
            coffset += cdimx

        return gxs
return gxs
def concat(xs, axis=1):
    """Concatenates given variables along an axis.

    Thin functional wrapper around the :class:`Concat` function object.

    Args:
        xs (tuple of Variables): Variables to be concatenated.
        axis (int): Axis that the input arrays are concatenated along.
            Defaults to 1 (the channel axis for NCHW-shaped data).

    Returns:
        ~chainer.Variable: Output variable.
    """
    return Concat(axis=axis)(*xs)
|
wangdefeng/flink-boot | flink-dynamic-load-class/src/main/java/com/intsmaze/flink/dynamic/base/other/JarLoadService.java | <reponame>wangdefeng/flink-boot<filename>flink-dynamic-load-class/src/main/java/com/intsmaze/flink/dynamic/base/other/JarLoadService.java
package com.intsmaze.flink.dynamic.base.other;
import com.intsmaze.flink.dynamic.DynamicService;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
/**
* @author:YangLiu
* @date:2018年8月8日 上午10:36:10
* @describe: 使用类加载器加载jar包,然后加载指定的类进去虚拟机,将intsmaze-classloadjar这个model打包
*/
/**
 * Demonstrates loading classes from external jar files at runtime via a
 * {@link URLClassLoader}, then instantiating and invoking a
 * {@link DynamicService} implementation from the loaded jar.
 */
public class JarLoadService {

    /**
     * Builds a URLClassLoader over the (mock) jar list and invokes the
     * dynamically loaded service.  All failures are printed, not rethrown.
     */
    public void init() {
        try {
            // Working directory, used as the location to scan for jars.
            String path = System.getProperty("user.dir");
            System.out.println(path);
            // BUGFIX: pass the computed path, not the string literal "path".
            String[] jarsName = getJarsName(path);
            URL[] us = makeUrl(jarsName);
            ClassLoader loader = new URLClassLoader(us);
            // Print the loader of this class for comparison with the child loader.
            System.out.println(JarLoadService.class.getClassLoader());
            Class c = loader.loadClass("org.intsmaze.classload.service.impl.URLClassLoaderJar");
            DynamicService o = (DynamicService) c.newInstance();
            o.executeService("i am intsmaze");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Simulates listing the jar file names under the given directory.
     * Mock implementation: returns fixed names regardless of {@code path}.
     */
    public String[] getJarsName(String path)
    {
        String[] arr = {"file:d:/intsmaze-classloadjar.jar", "file:d:/intsmaze-classloadjar.jar"};
        return arr;
    }

    /**
     * Simulates building a URL array from the given jar file names.
     * Mock implementation: ignores {@code arr} and returns fixed URLs.
     */
    public URL[] makeUrl(String[] arr) throws MalformedURLException
    {
        URL[] us = {new URL("file:d:/intsmaze-classloadjar.jar"), new URL("file:d:/intsmaze-classloadjar-v1.jar")};
        return us;
    }

    public static void main(String[] args) throws Exception {
        new JarLoadService().init();
    }
}
|
OTCdLink/Chiron | modules/chiron/toolbox/src/main/java/com/otcdlink/chiron/toolbox/converter/ConverterException.java | package com.otcdlink.chiron.toolbox.converter;
import com.google.common.base.Converter;
/**
 * Unchecked wrapper for an exception raised inside a
 * {@code com.google.common.base.Converter#doForward(Object)} call.
 * The original cause is carried verbatim and can be unwrapped via
 * {@link #getCause()} with no loss of information.
 */
public final class ConverterException extends RuntimeException {

    /**
     * Wraps the given cause.
     *
     * @param cause the exception to carry across the converter boundary
     */
    public ConverterException(final Throwable cause) {
        super(cause);
    }
}
|
ghentlivinglab/Personalized-mobility-information-service-solution-3 | frontend/app/tests/karma-jasmine-tests/conf.js | module.exports = function(config){
config.set({
basePath : '../../',
files : [ //Requires all used js files from the app itself aswell as angular modules (angular-resource.js etc.)
'https://ajax.googleapis.com/ajax/libs/jquery/2.2.2/jquery.min.js',
'../assets/javascript/modules/angular.min.js',
'../assets/javascript/modules/*.js',
'tests/karma-jasmine-tests/angular-mocks.js',
'../assets/javascript/*.js',
'https://maps.googleapis.com/maps/api/js?libraries=places&sensor=false&key=<KEY>',
'app.js',
'services/notificationServices/*.js',
'controllers/defaults/*.js',
'controllers/events/*.js',
'controllers/routes/*.js',
'controllers/travels/*.js',
'controllers/users/*.js',
'controllers/pois/*.js',
'translation_tables/*.js',
'directives/*.js',
'views/travels/*.html',
'views/routes/*.html',
'views/pois/*.html',
'views/events/*.html',
'views/users/*.html',
'services/resourceServices/_resources.js',
'services/resourceServices/*.js',
'services/authenticationServices/*.js',
'services/commonServices/*.js',
'tests/karma-jasmine-tests/*Spec.js',
],
autoWatch : true,
frameworks: ['jasmine'],
reporters: [/*'progress',*/'mocha','coverage','html'],
client: {
mocha: {
reporter:'reporter-file',
ui:'tdd'
}
},
coverageReporter : {
dir : 'tests/karma-jasmine-tests/coverage/',
reporters: [
{type: 'html', subdir: 'report-html'},
{type:'lcovonly', subdir:'.'}]
},
htmlReporter: {
outputDir:'karma-jasmine-tests',
reportName:'karma_html'
},
preprocessors: {
'views/travels/*.html':'ng-html2js',
'views/routes/*.html':'ng-html2js',
'views/pois/*.html':'ng-html2js',
'views/users/*.html':'ng-html2js',
'views/events/*.html':'ng-html2js',
'controllers/defaults/*.js': 'coverage',
'controllers/events/*.js': 'coverage',
'controllers/routes/*.js': 'coverage',
'controllers/travels/*.js': 'coverage',
'controllers/users/*.js': 'coverage',
'directives/*.js': 'coverage',
'controllers/pois/*.js': 'coverage',
'translation_tables/*.js': 'coverage',
'services/authenticationServices/*.js': 'coverage',
'services/resourceServices/resources.js': 'coverage',
'services/resourceServices/*.js': 'coverage',
'services/commonServices/*.js': 'coverage'
},
ngHtml2JsPreprocessor: {
prependPrefix: 'app/',
moduleName: 'templates'
},
/*phantomjsLauncher: {
exitOnResourceError:true
},*/
browsers : [/*'PhantomJS',*/'Chrome'],
plugins : [
'karma-ng-html2js-preprocessor',
'karma-chrome-launcher',
'karma-phantomjs-launcher',
'karma-jasmine',
'karma-mocha-reporter',
'karma-html-reporter',
'karma-coverage'/*,
'karma-phantomjs-launcher'*/
]
});
}; |
hasnain1230/Intro-to-Computer-Science-Programs-cpp- | Fibonacci Sequence/src/Fibonacci Sequence.cpp | <gh_stars>0
//============================================================================
// Name : Fibonacci.cpp
// Author : <NAME>
// Version : 1.0.0
// Copyright : Freshman Class of 2020
// Description : This program will output a certain amount of Fibonacci Numbers. The user may chose the how many numbers you want calculated.
//============================================================================
#include <iostream>
using namespace std;
// Prints the first N Fibonacci numbers, where N is chosen by the user.
// Output format matches the original: "1, 1, 2, ..., and Z." with special
// wording for N == 1 and N == 2.
int main() {
	string anykey;
	long long choice;      // number of terms requested
	// Sliding window over the sequence; long long avoids the int overflow
	// the original hit after ~47 terms.
	long long x = 1, y = 1, z;

	cout << "This program will output a certain amount of Fibonacci Numbers. You may chose the how many numbers you want calculated." << endl;
	cout << endl;
	cout << endl;
	cout << "Please press any key to begin." << endl;
	cin >> anykey;
	cout << endl;
	cout << endl;
	cout << "How many numbers would you like to output?" << endl;
	cin >> choice;
	// BUG FIX: the original used 'choice' uninitialized when extraction
	// failed (non-numeric input), which is undefined behavior.
	if (!cin) {
		cout << "Invalid input." << endl;
		return 1;
	}
	cout << endl;
	cout << endl;

	// BUG FIX: the original printed nothing at all for zero or negative input.
	if (choice <= 0) {
		cout << "Nothing to output." << endl;
		return 0;
	}
	if (choice == 1) {
		cout << x << " only." << endl;
		return 0;
	}
	if (choice == 2) {
		cout << x << " and " << y << " ." << endl;
		return 0;
	}

	// Three or more terms: the first two are printed up front, the loop
	// produces the remaining choice-2 (replaces the 'choice = -100' hack).
	cout << x << ", " << y << ", ";
	for (long long counter = 0; counter < choice - 2; counter++) {
		z = y + x;
		x = y;
		y = z;
		if (counter < choice - 3) {
			cout << z << ", ";
		} else {
			cout << "and " << z << "." << endl;
		}
	}
	return 0;
}
|
NotDiscordOfficial/Fortnite_SDK | FrontendCamera_Main_classes.h | // BlueprintGeneratedClass FrontendCamera_Main.FrontendCamera_Main_C
// Size: 0x7f0 (Inherited: 0x7e0)
struct AFrontendCamera_Main_C : AFortCameraBase {
struct FPointerToUberGraphFrame UberGraphFrame; // 0x7e0(0x08)
struct UCameraComponent* LoginCamera_1; // 0x7e8(0x08)
void OnActivated(struct AFortPlayerController* PlayerController); // Function FrontendCamera_Main.FrontendCamera_Main_C.OnActivated // (Event|Public|BlueprintEvent) // @ game+0xda7c34
void ExecuteUbergraph_FrontendCamera_Main(int32_t EntryPoint); // Function FrontendCamera_Main.FrontendCamera_Main_C.ExecuteUbergraph_FrontendCamera_Main // (Final|UbergraphFunction|HasDefaults) // @ game+0xda7c34
};
|
avesus/OpenFPGA | openfpga/src/fpga_verilog/fabric_verilog_options.h | <gh_stars>100-1000
#ifndef FABRIC_VERILOG_OPTIONS_H
#define FABRIC_VERILOG_OPTIONS_H
/********************************************************************
 * Include header files required by the data structure definition
 *******************************************************************/
#include <string>
#include "verilog_port_types.h"
/* Begin namespace openfpga */
namespace openfpga {
/********************************************************************
 * Options for Fabric Verilog generator.
 * Plain option holder: every private field below has a matching
 * accessor/mutator pair; defaults are established by the constructor
 * (defined in the accompanying .cpp).
 *******************************************************************/
class FabricVerilogOption {
  public: /* Public constructor */
    /* Set default options */
    FabricVerilogOption();
  public: /* Public accessors */
    std::string output_directory() const;
    bool include_timing() const;
    bool explicit_port_mapping() const;
    bool compress_routing() const;
    e_verilog_default_net_type default_net_type() const;
    bool print_user_defined_template() const;
    bool verbose_output() const;
  public: /* Public mutators */
    void set_output_directory(const std::string& output_dir);
    void set_include_timing(const bool& enabled);
    void set_explicit_port_mapping(const bool& enabled);
    void set_compress_routing(const bool& enabled);
    void set_print_user_defined_template(const bool& enabled);
    /* Takes a textual net-type name; accepted values presumably map to
     * e_verilog_default_net_type (see verilog_port_types.h) - confirm
     * against the .cpp implementation. */
    void set_default_net_type(const std::string& default_net_type);
    void set_verbose_output(const bool& enabled);
  private: /* Internal Data */
    std::string output_directory_;      /* destination for generated Verilog */
    bool include_timing_;
    bool explicit_port_mapping_;
    bool compress_routing_;
    bool print_user_defined_template_;
    e_verilog_default_net_type default_net_type_;
    bool verbose_output_;
};
} /* End namespace openfpga*/
#endif
|
vcellmike/Biosimulators_VCell | vcell-math/src/main/java/jscl/math/MatrixVariable.java | package jscl.math;
/**
 * {@code GenericVariable} specialisation wrapping a matrix-valued
 * {@code Generic} expression.
 */
public class MatrixVariable extends GenericVariable {
    public MatrixVariable(Generic generic) {
        super(generic);
    }

    // Factory hook for the jscl variable framework; the null content is
    // presumably filled in by the caller - confirm against GenericVariable.
    protected Variable newinstance() {
        return new MatrixVariable(null);
    }
}
|
jzq84229/RxTools | RxDemo/src/main/java/com/tamsiree/rxdemo/activity/ActivityLoading.java | package com.tamsiree.rxdemo.activity;
import android.os.Bundle;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentPagerAdapter;
import androidx.viewpager.widget.ViewPager;
import com.google.android.material.tabs.TabLayout;
import com.tamsiree.rxdemo.R;
import com.tamsiree.rxdemo.fragment.FragmentLoadingDemo;
import com.tamsiree.rxdemo.fragment.FragmentLoadingWay;
import com.tamsiree.rxkit.RxDeviceTool;
import com.tamsiree.rxui.activity.ActivityBase;
import com.tamsiree.rxui.view.RxTitle;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * Demo screen hosting two loading-related fragments inside a
 * {@link ViewPager} whose pages are selectable through a {@link TabLayout}.
 *
 * @author tamsiree
 */
public class ActivityLoading extends ActivityBase {
    @BindView(R.id.rx_title)
    RxTitle mRxTitle;
    @BindView(R.id.tabs)
    TabLayout mTabs;
    @BindView(R.id.viewpager)
    ViewPager mViewpager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_loading);
        ButterKnife.bind(this);

        // Lock orientation and wire the back arrow before building the pager.
        RxDeviceTool.setPortrait(this);
        mRxTitle.setLeftFinish(mContext);

        mViewpager.setAdapter(new FragmentPagerAdapter(getSupportFragmentManager()) {
            private final String[] pageTitles = {"加载的方式", "加载的例子"};

            @Override
            public Fragment getItem(int position) {
                // Page 0 shows the loading styles, page 1 the usage examples.
                return position == 0
                        ? FragmentLoadingWay.newInstance()
                        : FragmentLoadingDemo.newInstance();
            }

            @Override
            public int getCount() {
                return pageTitles.length;
            }

            @Override
            public CharSequence getPageTitle(int position) {
                return pageTitles[position];
            }
        });
        mTabs.setupWithViewPager(mViewpager);
    }
}
|
old-bin/wot-design | test/unit/Input.spec.js | <filename>test/unit/Input.spec.js
import Input from 'wot-design/packages/input/index'
import { mount } from '@vue/test-utils'
import { testInstall, later } from './utils'
testInstall(Input)
// Unit tests for the wd-input component (wot-design). Each case mounts the
// component in isolation with @vue/test-utils and asserts on the rendered DOM.
test('placeholder', () => {
  let wrapper = mount(Input, {
    propsData: {
      placeholder: '请输入'
    }
  })
  expect(wrapper.element.querySelector('.wd-input__inner').placeholder).toBe('请输入')
})

test('disabled', () => {
  let wrapper = mount(Input, {
    propsData: {
      disabled: true
    }
  })
  expect(wrapper.contains('.is-disabled')).toBe(true)
})

test('readonly', () => {
  let wrapper = mount(Input, {
    propsData: {
      readonly: true
    }
  })
  expect(wrapper.element.querySelector('.wd-input__inner').readOnly).toBe(true)
})

test('error', () => {
  let wrapper = mount(Input, {
    propsData: {
      error: true
    }
  })
  expect(wrapper.contains('.is-error')).toBe(true)
})

// Clicking the clear icon must emit the 'clear' event.
test('clearable', () => {
  const clearHandler = jest.fn()
  let wrapper = mount(Input, {
    propsData: {
      clearable: true,
      value: '测试'
    },
    listeners: {
      clear: clearHandler
    }
  })
  let clearWrapper = wrapper.find('.wd-icon-error-fill')
  expect(clearWrapper).toBeTruthy()
  clearWrapper.trigger('click')
  expect(clearHandler).toHaveBeenCalled()
})

// Toggling the eye icon switches the field out of password mode.
test('show-password', async () => {
  let wrapper = mount(Input, {
    propsData: {
      showPassword: true
    }
  })
  expect(wrapper.element.querySelector('.wd-input__inner').type).toBe('password')
  wrapper.find('.wd-icon-eye-close').trigger('click')
  await later()
  expect(wrapper.element.querySelector('.wd-icon-view')).toBeTruthy()
})

test('prefix-icon, suffix-icon', () => {
  let wrapper = mount(Input, {
    propsData: {
      prefixIcon: 'wd-icon-person',
      suffixIcon: 'wd-icon-tickets'
    }
  })
  expect(wrapper.element.querySelector('.wd-icon-person')).toBeTruthy()
  expect(wrapper.element.querySelector('.wd-icon-tickets')).toBeTruthy()
})

test('prefix-icon, suffix-icon, slot', () => {
  let wrapper = mount(Input, {
    slots: {
      prefix: '<i class="test1"></i>',
      suffix: '<i class="test2"></i>s' // NOTE(review): the trailing 's' after the markup looks like a typo - confirm
    }
  })
  expect(wrapper.element.querySelector('.test1')).toBeTruthy()
  expect(wrapper.element.querySelector('.test2')).toBeTruthy()
})

test('show-word-limit', () => {
  let wrapper = mount(Input, {
    propsData: {
      maxlength: '20',
      showWordLimit: true
    }
  })
  expect(wrapper.element.querySelector('.wd-input__count')).toBeTruthy()
})

test('textarea', () => {
  let wrapper = mount(Input, {
    propsData: {
      type: 'textarea'
    }
  })
  expect(wrapper.element.querySelector('.wd-input__textarea-inner')).toBeTruthy()
})

// autosize accepts both a boolean and a {minRows, maxRows} object.
test('autosize', () => {
  let wrapper = mount(Input, {
    propsData: {
      autosize: true
    }
  })
  expect(wrapper.element.querySelector('.is-text-auto')).toBeTruthy()
  let wrapper2 = mount(Input, {
    propsData: {
      value: '1123',
      autosize: {
        minRows: 4,
        maxRows: 6
      }
    }
  })
  expect(wrapper2.element.querySelector('.is-text-auto')).toBeTruthy()
})

// All four DOM events must be re-emitted by the component.
test('event', () => {
  const focusHandler = jest.fn()
  const blurHandler = jest.fn()
  const changeHandler = jest.fn()
  const inputHandler = jest.fn()
  let wrapper = mount(Input, {
    listeners: {
      focus: focusHandler,
      blur: blurHandler,
      change: changeHandler,
      input: inputHandler
    }
  })
  let inputEl = wrapper.find('.wd-input__inner')
  inputEl.trigger('focus')
  expect(focusHandler).toHaveBeenCalled()
  inputEl.trigger('blur')
  expect(blurHandler).toHaveBeenCalled()
  inputEl.trigger('change')
  expect(changeHandler).toHaveBeenCalled()
  inputEl.trigger('input')
  expect(inputHandler).toHaveBeenCalled()
})

// Smoke test only: the public blur()/select() methods must not throw.
test('methods', () => {
  const blurHandler = jest.fn()
  let wrapper = mount(Input, {
    listeners: {
      blur: blurHandler
    }
  })
  wrapper.vm.blur()
  wrapper.vm.select()
})

test('calcTextareaHeight', async () => {
  /**
   * box-sizing cannot be read in this environment: window.getComputedStyle
   * only implements part of the spec, so patch the style properties the
   * component needs onto the returned object. The original patched function
   * must be restored conceptually per case - note the order dependency below.
   */
  const originGetComputedStyle = window.getComputedStyle
  window.getComputedStyle = ele => {
    const style = originGetComputedStyle(ele)
    style._values['box-sizing'] = 'content-box'
    return style
  }
  let wrapper = mount(Input, {
    propsData: {
      autosize: true
    }
  })
  await later()
  expect(parseFloat(wrapper.find('.wd-input__textarea-inner').element.style.height) < 0).toBe(true)
  // Second mount exercises the border-box branch of the height calculation.
  window.getComputedStyle = ele => {
    const style = originGetComputedStyle(ele)
    style._values['box-sizing'] = 'border-box'
    return style
  }
  let wrapper2 = mount(Input, {
    propsData: {
      autosize: {
        minRows: 2,
        maxRows: 4
      }
    }
  })
  await later()
  expect(parseFloat(wrapper2.find('.wd-input__textarea-inner').element.style.height) < 0).toBe(true)
})
|
zhenchai/pigeon | pigeon-remoting/src/main/java/com/dianping/pigeon/remoting/invoker/concurrent/FutureFactory.java | package com.dianping.pigeon.remoting.invoker.concurrent;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import com.dianping.pigeon.log.Logger;
import com.dianping.pigeon.log.LoggerLoader;
/**
 * Per-thread hand-off point for the {@link Future} produced by the most
 * recent asynchronous invocation. Mutators store the future; every getter
 * consumes it (one-shot) by clearing the thread-local slot.
 * (Javadoc translated to English from the original Chinese.)
 */
public class FutureFactory {

    // NOTE(review): not referenced anywhere in this class.
    private static Logger log = LoggerLoader.getLogger(FutureFactory.class);

    // Holds the pending Future for the calling thread only.
    private static ThreadLocal<Future<?>> threadFuture = new ThreadLocal<Future<?>>();

    /** Returns and clears the calling thread's pending future (may be null). */
    public static Future<?> getFuture() {
        Future<?> future = threadFuture.get();
        threadFuture.remove();
        return future;
    }

    /**
     * Typed variant of {@link #getFuture()}; the class argument is only a
     * type witness - the cast is unchecked.
     */
    public static <T> Future<T> getFuture(Class<T> type) {
        Future<T> future = (Future<T>) threadFuture.get();
        threadFuture.remove();
        return future;
    }

    /** Stores the future produced by the invocation just made on this thread. */
    public static void setFuture(Future<?> future) {
        threadFuture.set(future);
    }

    /** Clears the slot without returning anything. */
    public static void remove() {
        threadFuture.remove();
    }

    /**
     * Blocks for and returns the invocation result directly; used to make a
     * synchronous call against a service configured for asynchronous
     * invocation. The class argument is only a type witness.
     *
     * @param <T> result type
     * @param res class of the result
     * @return the invocation result
     * @throws InterruptedException
     * @throws ExecutionException
     */
    public static <T> T getResult(Class<T> res) throws InterruptedException, ExecutionException {
        return (T) getFuture().get();
    }

    /**
     * Blocks for and returns the invocation result directly; used to make a
     * synchronous call against a service configured for asynchronous
     * invocation.
     *
     * @return the invocation result
     * @throws InterruptedException
     * @throws ExecutionException
     */
    public static Object getResult() throws InterruptedException, ExecutionException {
        return getFuture().get();
    }
}
|
JLLeitschuh/nb-nodejs | node-projects/src/main/java/org/netbeans/modules/nodejs/NbInfo.java | package org.netbeans.modules.nodejs;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.LinkedHashMap;
import java.util.Map;
import org.netbeans.modules.nodejs.json.ObjectMapperProvider;
import static org.netbeans.modules.nodejs.json.ObjectMapperProvider.STRING_OBJECT_MAP;
import org.openide.filesystems.FileObject;
import org.openide.util.Exceptions;
import org.openide.util.RequestProcessor;
/**
 * Encapsulates data stored in {@code .nbinfo} next to {@code package.json} -
 * things that are either machine-specific, NetBeans-specific, or have no
 * analogue in {@code package.json}. Writes are debounced: each mutator
 * schedules the save task to run one second after the last change.
 *
 * @author <NAME>
 */
final class NbInfo implements Runnable {
    private final RequestProcessor.Task task;
    private final NodeJSProject prj;
    private String platform;        // guarded by 'this'
    private String runArguments;    // guarded by 'this'
    private boolean initialized;    // NOTE(review): checkInit() itself is unsynchronized - confirm callers stay on one thread
    private volatile boolean fileFound;

    NbInfo ( NodeJSProject prj ) {
        this.prj = prj;
        RequestProcessor rp = NodeJSProject.NODE_JS_PROJECT_THREAD_POOL;
        task = rp.create( this );
    }

    /** Lazily loads .nbinfo the first time any accessor or mutator runs. */
    private void checkInit () {
        if (!initialized) {
            initialized = true;
            load();
        }
    }

    /** Records the platform name and schedules a debounced save. */
    void setPlatformName ( String platform ) {
        checkInit();
        synchronized ( this ) {
            this.platform = platform;
        }
        task.schedule( 1000 );
    }

    /** Records the run arguments and schedules a debounced save. */
    void setRunArguments ( String args ) {
        checkInit();
        synchronized ( this ) {
            // BUG FIX: the original read 'this.runArguments = runArguments',
            // a self-assignment that silently discarded the new value.
            this.runArguments = args;
        }
        task.schedule( 1000 );
    }

    public String getRunArguments () {
        checkInit();
        synchronized ( this ) {
            return runArguments;
        }
    }

    public String getPlatformName () {
        checkInit();
        synchronized ( this ) {
            return platform;
        }
    }

    /** True once load() has seen an existing .nbinfo file. */
    boolean hasFile () {
        return fileFound;
    }

    /** Save task body: serializes the current state as JSON into .nbinfo. */
    @Override
    public void run () {
        try {
            FileObject fo = prj.getProjectDirectory().getFileObject( ".nbinfo" );
            if (fo == null) {
                fo = prj.getProjectDirectory().createData( ".nbinfo" );
            }
            Map<String, String> m = new LinkedHashMap<>();
            synchronized ( this ) {
                if (platform != null) {
                    m.put( "platformName", platform );
                }
                if (runArguments != null) {
                    m.put( "arguments", runArguments );
                }
            }
            try (OutputStream out = fo.getOutputStream()) {
                ObjectMapperProvider.newObjectMapper().writeValue( out, m );
            }
        } catch ( IOException ex ) {
            Exceptions.printStackTrace( ex );
        }
    }

    /** Reads .nbinfo (if present) and populates the in-memory fields. */
    private void load () {
        FileObject fo = prj.getProjectDirectory().getFileObject( ".nbinfo" );
        if (fo != null) {
            fileFound = true;
            Map<String, String> loadedData = null;
            try (InputStream in = fo.getInputStream()) {
                loadedData = ObjectMapperProvider.newObjectMapper().readValue( in, STRING_OBJECT_MAP );
            } catch ( IOException ex ) {
                Exceptions.printStackTrace( ex );
            } finally {
                if (loadedData != null) {
                    synchronized ( this ) {
                        platform = loadedData.get( "platformName" );
                        runArguments = loadedData.get( "arguments" );
                    }
                }
            }
        }
    }
}
|
rkrzewski/bnd | aQute.libg/src/aQute/libg/filerepo/FileRepo.java | <filename>aQute.libg/src/aQute/libg/filerepo/FileRepo.java<gh_stars>0
package aQute.libg.filerepo;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import aQute.libg.version.*;
/**
 * Simple directory-per-bsn file repository: {@code root/<bsn>/<bsn>-<version>.(jar|lib)}.
 */
public class FileRepo {
    File root;
    // BUG FIX: the original range was 'a-zA-z' which accidentally also
    // matched [ \ ] ^ ` ; '_' is listed separately, so 'a-zA-Z' is intended.
    Pattern REPO_FILE = Pattern.compile("([-a-zA-Z0-9_\\.]+)-([0-9\\.]+|latest)\\.(jar|lib)");

    public FileRepo(File root) {
        this.root = root;
    }

    /**
     * Get a list of files for bundles that are constrained by the bsn and
     * versionRange.
     *
     * @return matching files, or null when no directory exists for the bsn
     */
    public File[] get(String bsn, final VersionRange versionRange) throws Exception {
        // Check if the entry exists.
        File f = new File(root, bsn);
        if (!f.isDirectory())
            return null;

        // Keep only repository files whose version falls inside the range.
        // NOTE(review): a file versioned 'latest' matches REPO_FILE but is
        // not a parsable Version - confirm how such entries should behave.
        return f.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                Matcher m = REPO_FILE.matcher(name);
                if (!m.matches())
                    return false;
                Version v = new Version(m.group(2));
                return versionRange.includes(v);
            }
        });
    }

    /**
     * Lists top-level repository entries whose name matches the regex
     * (all entries when regex is null).
     */
    public List<String> list(String regex) throws Exception {
        if (regex == null)
            regex = ".*";
        final Pattern pattern = Pattern.compile(regex);
        String list[] = root.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                Matcher matcher = pattern.matcher(name);
                return matcher.matches();
            }
        });
        // BUG FIX: File.list returns null when root is missing or not a
        // directory; the original let Arrays.asList throw an NPE.
        if (list == null)
            return Collections.emptyList();
        return Arrays.asList(list);
    }

    /** Collects every parsable version present for the given bsn. */
    public List<Version> versions(String bsn) throws Exception {
        File dir = new File(root, bsn);
        final List<Version> versions = new ArrayList<Version>();
        // The filter is used for its side effect of accumulating versions;
        // a missing directory simply yields an empty list.
        dir.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                Matcher m = REPO_FILE.matcher(name);
                if (m.matches()) {
                    versions.add(new Version(m.group(2)));
                    return true;
                }
                return false;
            }
        });
        return versions;
    }

    /**
     * Picks one file from the range: the first entry for a negative strategy,
     * otherwise the last. Returns null when nothing matches.
     * NOTE(review): listFiles order is unspecified; if "first"/"last" is meant
     * to be lowest/highest version, the array should be sorted first.
     */
    public File get(String bsn, VersionRange range, int strategy) throws Exception {
        File[] files = get(bsn, range);
        // BUG FIX: get(bsn, range) returns null when the bsn directory does
        // not exist; the original dereferenced it unconditionally.
        if (files == null || files.length == 0)
            return null;
        if (files.length == 1)
            return files[0];
        if (strategy < 0) {
            return files[0];
        } else
            return files[files.length - 1];
    }

    /** Returns the target file for storing bsn at the given version. */
    public File put(String bsn, Version version) {
        // BUG FIX: the directory must live under the repository root (every
        // other method resolves bsn against root); the original created it
        // relative to the process working directory.
        File dir = new File(root, bsn);
        dir.mkdirs();
        File file = new File(dir, bsn + "-" + version.getMajor() + "." + version.getMinor() + "." + version.getMicro());
        return file;
    }
}
|
Dennisbonke/toaruos | base/usr/include/wchar.h | #pragma once
#include <_cheader.h>
#include <stddef.h>
_Begin_C_Header
extern int wcwidth(wchar_t c);
extern wchar_t * wcsncpy(wchar_t * dest, const wchar_t * src, size_t n);
extern size_t wcslen(const wchar_t * s);
extern int wcscmp(const wchar_t *s1, const wchar_t *s2);
extern wchar_t * wcscat(wchar_t *dest, const wchar_t *src);
extern wchar_t * wcstok(wchar_t * str, const wchar_t * delim, wchar_t ** saveptr);
extern size_t wcsspn(const wchar_t * wcs, const wchar_t * accept);
extern wchar_t *wcspbrk(const wchar_t *wcs, const wchar_t *accept);
extern wchar_t * wcschr(const wchar_t *wcs, wchar_t wc);
extern wchar_t * wcsrchr(const wchar_t *wcs, wchar_t wc);
extern wchar_t * wcsncat(wchar_t *dest, const wchar_t * src, size_t n);
typedef unsigned int wint_t;
_End_C_Header
|
Denisss025/wsfcpp | wsf_c/axis2c/neethi/src/engine.c | <gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <neethi_engine.h>
#include <neethi_assertion_builder.h>
#include <axiom_attribute.h>
/*Private functions*/
static neethi_all_t *
neethi_engine_get_operator_all(
const axutil_env_t *env,
axiom_node_t *node,
axiom_element_t *element);
static neethi_exactlyone_t *
neethi_engine_get_operator_exactlyone(
const axutil_env_t *env,
axiom_node_t *node,
axiom_element_t *element);
static neethi_reference_t *
neethi_engine_get_operator_reference(
const axutil_env_t *env,
axiom_node_t *node,
axiom_element_t *element);
static neethi_policy_t *
neethi_engine_get_operator_neethi_policy(
const axutil_env_t *env,
axiom_node_t *node,
axiom_element_t *element);
static axis2_status_t
neethi_engine_process_operation_element(
const axutil_env_t *env,
neethi_operator_t *neethi_operator,
axiom_node_t *node,
axiom_element_t *element);
static axis2_status_t
neethi_engine_add_policy_component(
const axutil_env_t *env,
neethi_operator_t *container_operator,
neethi_operator_t *component);
static axis2_bool_t
neethi_engine_operator_is_empty(
neethi_operator_t *operator,
const axutil_env_t *env);
static neethi_exactlyone_t *
neethi_engine_compute_resultant_component(
axutil_array_list_t *normalized_inner_components,
neethi_operator_type_t type,
const axutil_env_t *env);
static axutil_array_list_t *
neethi_engine_operator_get_components(
neethi_operator_t *operator,
const axutil_env_t *env);
static neethi_exactlyone_t *
neethi_engine_normalize_operator(
neethi_operator_t *operator,
neethi_registry_t *registry,
axis2_bool_t deep,
const axutil_env_t *env);
static neethi_exactlyone_t *
neethi_engine_get_cross_product(
neethi_exactlyone_t *exactlyone1,
neethi_exactlyone_t *exactlyone2,
const axutil_env_t *env);
static void
neethi_engine_clear_element_attributes(
axutil_hash_t *attr_hash,
const axutil_env_t *env);
/*Implementations*/
/* Public entry point: builds the in-memory policy object model from a
 * wsp:Policy AXIOM element. 'node' and 'element' must refer to the same
 * element. Returns NULL (with env->error set) on failure. */
AXIS2_EXTERN neethi_policy_t *AXIS2_CALL
neethi_engine_get_policy(
    const axutil_env_t *env,
    axiom_node_t *node,
    axiom_element_t *element)
{
    /* The policy handler recurses into nested operators as needed. */
    return neethi_engine_get_operator_neethi_policy(env, node, element);
}
/* Parses a wsp:All element: creates a neethi_all_t, wraps it in a temporary
 * neethi_operator so the shared element-processing routine can attach the
 * children, then returns the populated struct. Returns NULL on failure with
 * env->error set. */
static neethi_all_t *
neethi_engine_get_operator_all(
    const axutil_env_t *env,
    axiom_node_t *node,
    axiom_element_t *element)
{
    neethi_all_t *all = NULL;
    neethi_operator_t *neethi_operator = NULL;
    axis2_status_t status = AXIS2_SUCCESS;

    all = neethi_all_create(env);
    if(!all)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        return NULL;
    }
    neethi_operator = neethi_operator_create(env);
    if(!neethi_operator)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        /* BUG FIX: the original leaked 'all' on this path. */
        neethi_all_free(all, env);
        return NULL;
    }
    neethi_operator_set_value(neethi_operator, env, all, OPERATOR_TYPE_ALL);
    status = neethi_engine_process_operation_element(env, neethi_operator, node, element);
    /* Detach 'all' from the wrapper before freeing it so the wrapper does
     * not destroy the value we are about to return. */
    neethi_operator_set_value_null(neethi_operator, env);
    neethi_operator_free(neethi_operator, env);
    neethi_operator = NULL;
    if(status != AXIS2_SUCCESS)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_ALL_CREATION_FAILED, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] All creation failed");
        neethi_all_free(all, env);
        all = NULL;
        return NULL;
    }
    return all;
}
/* Parses a wsp:ExactlyOne element: creates a neethi_exactlyone_t, wraps it
 * in a temporary neethi_operator so the shared element-processing routine
 * can attach the children, then returns the populated struct. Returns NULL
 * on failure with env->error set. */
static neethi_exactlyone_t *
neethi_engine_get_operator_exactlyone(
    const axutil_env_t *env,
    axiom_node_t *node,
    axiom_element_t *element)
{
    neethi_exactlyone_t *exactlyone = NULL;
    neethi_operator_t *neethi_operator = NULL;
    axis2_status_t status = AXIS2_SUCCESS;

    exactlyone = neethi_exactlyone_create(env);
    if(!exactlyone)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        return NULL;
    }
    neethi_operator = neethi_operator_create(env);
    if(!neethi_operator)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        /* BUG FIX: the original leaked 'exactlyone' on this path. */
        neethi_exactlyone_free(exactlyone, env);
        return NULL;
    }
    neethi_operator_set_value(neethi_operator, env, exactlyone, OPERATOR_TYPE_EXACTLYONE);
    status = neethi_engine_process_operation_element(env, neethi_operator, node, element);
    /* Detach before freeing the wrapper so the returned value survives. */
    neethi_operator_set_value_null(neethi_operator, env);
    neethi_operator_free(neethi_operator, env);
    neethi_operator = NULL;
    if(status != AXIS2_SUCCESS)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_EXACTLYONE_CREATION_FAILED, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] Exactlyone creation failed.");
        neethi_exactlyone_free(exactlyone, env);
        exactlyone = NULL;
        return NULL;
    }
    return exactlyone;
}
/* Parses a wsp:PolicyReference element: reads its URI attribute into a new
 * neethi_reference_t. Returns NULL on allocation failure with env->error
 * set; an absent URI attribute simply leaves the reference without a URI. */
neethi_reference_t *
neethi_engine_get_operator_reference(
    const axutil_env_t *env,
    axiom_node_t *node,
    axiom_element_t *element)
{
    neethi_reference_t *reference = NULL;
    axutil_qname_t *qname = NULL;
    axis2_char_t *attribute_value = NULL;
    (void)node;

    reference = neethi_reference_create(env);
    if(!reference)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        return NULL;
    }
    qname = axutil_qname_create(env, NEETHI_URI, NULL, NULL);
    if(!qname)
    {
        /* BUG FIX: the original leaked 'reference' on this path. */
        neethi_reference_free(reference, env);
        return NULL;
    }
    attribute_value = axiom_element_get_attribute_value(element, env, qname);
    /* BUG FIX: the qname is only a lookup key; the original never released
     * it, leaking one qname per call. */
    axutil_qname_free(qname, env);
    if(attribute_value)
    {
        neethi_reference_set_uri(reference, env, attribute_value);
    }
    return reference;
}
/* This function is called when we encounter a wsp:Policy element - both for
 * the document root and for nested wsp:Policy operators. It creates the
 * policy struct, wraps it in a temporary neethi_operator so the shared
 * element-processing routine can populate it, and returns the result.
 * Returns NULL on failure with env->error set. */
static neethi_policy_t *
neethi_engine_get_operator_neethi_policy(
    const axutil_env_t *env,
    axiom_node_t *node,
    axiom_element_t *element)
{
    neethi_policy_t *neethi_policy = NULL;
    neethi_operator_t *neethi_operator = NULL;
    axis2_status_t status = AXIS2_SUCCESS;

    /* Creates a policy struct */
    neethi_policy = neethi_policy_create(env);
    if(!neethi_policy)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        return NULL;
    }

    /* Then we wrap it in a neethi_operator */
    neethi_operator = neethi_operator_create(env);
    if(!neethi_operator)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        /* BUG FIX: the original leaked 'neethi_policy' on this path. */
        neethi_policy_free(neethi_policy, env);
        return NULL;
    }
    neethi_operator_set_value(neethi_operator, env, neethi_policy, OPERATOR_TYPE_POLICY);

    /* This call does all the processing and builds the policy object model */
    status = neethi_engine_process_operation_element(env, neethi_operator, node, element);

    /* To prevent the wrapper from freeing the policy object we detach it
     * first. The policy is freed by a parent operator or by the outside
     * caller that requested it. */
    neethi_operator_set_value_null(neethi_operator, env);
    neethi_operator_free(neethi_operator, env);
    neethi_operator = NULL;
    if(status != AXIS2_SUCCESS)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_POLICY_CREATION_FAILED, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] Policy creation failed.");
        neethi_policy_free(neethi_policy, env);
        neethi_policy = NULL;
        return NULL;
    }
    return neethi_policy;
}
/* This function will construct the policy objecy model by
* filling the component array_list inside the passing
* policy operator */
static axis2_status_t
neethi_engine_process_operation_element(
const axutil_env_t *env,
neethi_operator_t *neethi_operator,
axiom_node_t *node,
axiom_element_t *element)
{
neethi_operator_type_t type;
axiom_element_t *child_element = NULL;
axiom_node_t *child_node = NULL;
axiom_children_iterator_t *children_iter = NULL;
void *value = NULL;
type = neethi_operator_get_type(neethi_operator, env);
value = neethi_operator_get_value(neethi_operator, env);
if(type == OPERATOR_TYPE_POLICY)
{
/* wsp:Policy element can have any number of attributes
* we will store them in a hash from the uri and localname */
axutil_hash_t *attributes = axiom_element_extract_attributes(element, env, node);
if(attributes)
{
axutil_hash_index_t *hi = NULL;
/* When creating the policy object we created the hash */
axutil_hash_t *ht = neethi_policy_get_attributes((neethi_policy_t *)value, env);
if(!ht)
{
AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] Policy hash map creation failed.");
return AXIS2_FAILURE;
}
for(hi = axutil_hash_first(attributes, env); hi; hi = axutil_hash_next(env, hi))
{
axis2_char_t *key = NULL;
void *val = NULL;
axutil_qname_t *qname = NULL;
axis2_char_t *attr_val = NULL;
axiom_namespace_t *ns = NULL;
axis2_char_t *ns_uri = NULL;
axiom_attribute_t *om_attr = NULL;
axutil_hash_this(hi, NULL, NULL, &val);
if(val)
{
om_attr = (axiom_attribute_t *)val;
ns = axiom_attribute_get_namespace(om_attr, env);
if(ns)
{
ns_uri = axiom_namespace_get_uri(ns, env);
}
qname = axutil_qname_create(env, axiom_attribute_get_localname(om_attr, env),
ns_uri, NULL);
if(qname)
{
key = axutil_qname_to_string(qname, env);
if(key)
{
attr_val = axiom_attribute_get_value(om_attr, env);
if(attr_val)
{
/* axutil_qname_free will free the returned key
* of the qname so will duplicate it when storing */
axutil_hash_set(ht, axutil_strdup(env, key), AXIS2_HASH_KEY_STRING,
axutil_strdup(env, attr_val));
}
}
axutil_qname_free(qname, env);
}
}
}
/* axiom_element_extract_attributes method will always returns
* a cloned copy, so we need to free it after we have done with it */
neethi_engine_clear_element_attributes(attributes, env);
attributes = NULL;
}
}
children_iter = axiom_element_get_children(element, env, node);
if(children_iter)
{
while(axiom_children_iterator_has_next(children_iter, env))
{
/* Extract the element and check the namespace. If the namespace
* is in ws_policy then we call the relevent operator builder
* otherwise we will call the assertion_builder */
child_node = axiom_children_iterator_next(children_iter, env);
if(child_node)
{
if(axiom_node_get_node_type(child_node, env) == AXIOM_ELEMENT)
{
child_element = (axiom_element_t *)axiom_node_get_data_element(child_node, env);
if(child_element)
{
axiom_namespace_t *namespace = NULL;
axis2_char_t *uri = NULL;
axis2_char_t *local_name = NULL;
neethi_operator_t *operator = NULL;
local_name = axiom_element_get_localname(child_element, env);
namespace = axiom_element_get_namespace(child_element, env, child_node);
if(!namespace)
{
AXIS2_ERROR_SET(env->error,
AXIS2_ERROR_NEETHI_ELEMENT_WITH_NO_NAMESPACE, AXIS2_FAILURE);
AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
"[neethi] Element with no namespace");
return AXIS2_FAILURE;
}
uri = axiom_namespace_get_uri(namespace, env);
if(!uri)
{
AXIS2_ERROR_SET(env->error, AXIS2_ERROR_INVALID_EMPTY_NAMESPACE_URI,
AXIS2_FAILURE);
AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
"[neethi] Invalid Empty namespace uri.");
return AXIS2_FAILURE;
}
if((axutil_strcmp(uri, NEETHI_NAMESPACE) == 0) || (axutil_strcmp(uri,
NEETHI_POLICY_15_NAMESPACE) == 0))
{
/* Looking at the localname we will call the relevent
* operator function. After that the newly created
* object is wrapped in a neethi_operator and stored in
* the parent's component list */
if(axutil_strcmp(local_name, NEETHI_POLICY) == 0)
{
neethi_policy_t *neethi_policy = NULL;
neethi_policy = neethi_engine_get_operator_neethi_policy(env,
child_node, child_element);
if(neethi_policy)
{
operator = neethi_operator_create(env);
neethi_operator_set_value(operator, env, neethi_policy,
OPERATOR_TYPE_POLICY);
neethi_engine_add_policy_component(env, neethi_operator,
operator);
}
else
{
AXIS2_ERROR_SET(env->error,
AXIS2_ERROR_NEETHI_POLICY_CREATION_FAILED_FROM_ELEMENT,
AXIS2_FAILURE);
AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
"[neethi] Policy creation failed from element.");
return AXIS2_FAILURE;
}
}
else if(axutil_strcmp(local_name, NEETHI_ALL) == 0)
{
neethi_all_t *all = NULL;
all
= neethi_engine_get_operator_all(env, child_node, child_element);
if(all)
{
operator = neethi_operator_create(env);
neethi_operator_set_value(operator, env, all, OPERATOR_TYPE_ALL);
neethi_engine_add_policy_component(env, neethi_operator,
operator);
}
else
{
AXIS2_ERROR_SET(env->error,
AXIS2_ERROR_NEETHI_ALL_CREATION_FAILED_FROM_ELEMENT,
AXIS2_FAILURE);
AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
"[neethi] All creation failed from element.");
return AXIS2_FAILURE;
}
}
else if(axutil_strcmp(local_name, NEETHI_EXACTLYONE) == 0)
{
neethi_exactlyone_t *exactlyone = NULL;
exactlyone = neethi_engine_get_operator_exactlyone(env, child_node,
child_element);
if(exactlyone)
{
operator = neethi_operator_create(env);
neethi_operator_set_value(operator, env, exactlyone,
OPERATOR_TYPE_EXACTLYONE);
neethi_engine_add_policy_component(env, neethi_operator,
operator);
}
else
{
AXIS2_ERROR_SET(env->error,
AXIS2_ERROR_NEETHI_EXACTLYONE_CREATION_FAILED_FROM_ELEMENT,
AXIS2_FAILURE);
AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
"[neethi] Exactlyone creation failed from element.");
return AXIS2_FAILURE;
}
}
else if(axutil_strcmp(local_name, NEETHI_REFERENCE) == 0)
{
neethi_reference_t *reference = NULL;
reference = neethi_engine_get_operator_reference(env, child_node,
child_element);
if(reference)
{
operator = neethi_operator_create(env);
neethi_operator_set_value(operator, env, reference,
OPERATOR_TYPE_REFERENCE);
neethi_engine_add_policy_component(env, neethi_operator,
operator);
}
else
{
AXIS2_ERROR_SET(env->error,
AXIS2_ERROR_NEETHI_REFERENCE_CREATION_FAILED_FROM_ELEMENT,
AXIS2_FAILURE);
AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
"[neethi] Reference cretion failed from element.");
return AXIS2_FAILURE;
}
}
}
else
{
/* This is an assertion in a different domain. Assertion builder
* should be called and that will call the relevent assertion builder
* after looking at the localname and the namespace */
neethi_assertion_t *assertion = NULL;
assertion = neethi_assertion_builder_build(env, child_node,
child_element);
if(assertion)
{
operator = neethi_operator_create(env);
neethi_operator_set_value(operator, env, assertion,
OPERATOR_TYPE_ASSERTION);
neethi_engine_add_policy_component(env, neethi_operator, operator);
neethi_assertion_set_node(assertion, env, child_node);
}
else
{
AXIS2_ERROR_SET(env->error,
AXIS2_ERROR_NEETHI_ASSERTION_CREATION_FAILED_FROM_ELEMENT,
AXIS2_FAILURE);
AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
"[neethi] Assertion creation failed from element.");
return AXIS2_FAILURE;
}
}
}
}
}
}
return AXIS2_SUCCESS;
}
else
return AXIS2_FAILURE;
}
/* Appends component to the child list of container_operator's wrapped
 * value.  After looking at the operator_type this function calls the
 * relevant neethi operator's add-operator function.
 *
 * env                - axis2 environment (allocation / logging).
 * container_operator - wrapper whose value receives the component.
 * component          - operator to append to the container.
 *
 * Returns AXIS2_SUCCESS when the container holds a non-NULL value,
 * AXIS2_FAILURE when the value is NULL or the type is
 * OPERATOR_TYPE_UNKNOWN. */
static axis2_status_t
neethi_engine_add_policy_component(
    const axutil_env_t *env,
    neethi_operator_t *container_operator,
    neethi_operator_t *component)
{
    neethi_operator_type_t type;
    void *value = NULL;
    neethi_policy_t *neethi_policy = NULL;
    neethi_exactlyone_t *exactlyone = NULL;
    neethi_all_t *all = NULL;
    neethi_assertion_t *assertion = NULL;

    type = neethi_operator_get_type(container_operator, env);
    value = neethi_operator_get_value(container_operator, env);
    if(value)
    {
        switch(type)
        {
            case OPERATOR_TYPE_POLICY:
                neethi_policy = (neethi_policy_t *)value;
                neethi_policy_add_operator(neethi_policy, env, component);
                break;
            case OPERATOR_TYPE_ALL:
                all = (neethi_all_t *)value;
                neethi_all_add_operator(all, env, component);
                break;
            case OPERATOR_TYPE_EXACTLYONE:
                exactlyone = (neethi_exactlyone_t *)value;
                neethi_exactlyone_add_operator(exactlyone, env, component);
                break;
            case OPERATOR_TYPE_UNKNOWN:
                return AXIS2_FAILURE;
                break; /* unreachable; kept for case symmetry */
            case OPERATOR_TYPE_ASSERTION:
                assertion = (neethi_assertion_t *)value;
                neethi_assertion_add_operator(assertion, env, component);
                break;
            case OPERATOR_TYPE_REFERENCE:
                /* NOTE(review): a reference container reports success without
                 * storing the component anywhere — presumably references are
                 * resolved during normalization before this matters; confirm
                 * this is intentional. */
                break;
        }
        return AXIS2_SUCCESS;
    }
    else
        return AXIS2_FAILURE;
}
/***************************************/
/* This function is only for testing.
 * Remove it later.
 *
 * Inspection helper for a debugger: a normalized policy must contain
 * exactly one component, an ExactlyOne operator with a non-NULL value.
 * Computes nothing and returns on every path; it exists purely as a
 * breakpoint target. */
void
check_neethi_policy(
    neethi_policy_t *neethi_policy,
    const axutil_env_t *env)
{
    axutil_array_list_t *list = NULL;
    neethi_operator_t *op = NULL;
    neethi_operator_type_t type;

    list = neethi_policy_get_policy_components(neethi_policy, env);

    /* Bug fix: the original only rejected lists with MORE than one
     * component, so an empty (or NULL) list fell through to the index-0
     * access below and dereferenced a NULL operator. */
    if(!list || axutil_array_list_size(list, env) != 1)
    {
        return;
    }

    op = (neethi_operator_t *)axutil_array_list_get(list, env, 0);
    if(!op)
    {
        return;
    }

    type = neethi_operator_get_type(op, env);
    if(type == OPERATOR_TYPE_EXACTLYONE)
    {
        void *value = neethi_operator_get_value(op, env);
        if(value)
        {
            /* Normalized form confirmed. */
            return;
        }
    }
    else
    {
        return;
    }
}
/************************************************/
/*
 * The following functions normalize according to the WS-Policy spec.
 * A normalized policy has the following form:
 *
 *   <wsp:Policy>
 *     <wsp:ExactlyOne>
 *       ( <wsp:All> ( <Assertion ...> ... </Assertion> )* </wsp:All> )*
 *     </wsp:ExactlyOne>
 *   </wsp:Policy>
 */

/* Convenience wrapper around neethi_engine_normalize for callers that
 * have no policy registry; external <wsp:PolicyReference> URIs cannot be
 * resolved in that case (see neethi_engine_normalize_operator).
 *
 * deep - forwarded to the normalizer; note the assertion branch of
 *        neethi_engine_normalize_operator currently fails when deep is
 *        set, so pass AXIS2_FALSE for shallow normalization.
 *
 * Returns a newly created normalized policy, or NULL on failure. */
AXIS2_EXTERN neethi_policy_t *AXIS2_CALL
neethi_engine_get_normalize(
    const axutil_env_t *env,
    axis2_bool_t deep,
    neethi_policy_t *neethi_policy)
{
    /* In the first call we pass the registry as null. */
    return neethi_engine_normalize(env, neethi_policy, NULL, deep);
}
/* Normalizes neethi_policy into the canonical
 * Policy -> ExactlyOne -> ( All ... )* form.
 *
 * env           - axis2 environment.
 * neethi_policy - policy to normalize; wrapped read-only, caller keeps
 *                 ownership.
 * registry      - resolves <wsp:PolicyReference> URIs; may be NULL, in
 *                 which case a reference makes normalization fail.
 * deep          - forwarded to the operator normalizer.
 *
 * Returns a newly created policy carrying the normalized tree, or NULL
 * on failure (error set on env). */
AXIS2_EXTERN neethi_policy_t *AXIS2_CALL
neethi_engine_normalize(
    const axutil_env_t *env,
    neethi_policy_t *neethi_policy,
    neethi_registry_t *registry,
    axis2_bool_t deep)
{
    neethi_policy_t *resultant_neethi_policy = NULL;
    neethi_operator_t *operator = NULL;
    neethi_operator_t *component = NULL;
    neethi_exactlyone_t *exactlyone = NULL;
    axis2_char_t *policy_name = NULL;
    axis2_char_t *policy_id = NULL;

    /* The normalized tree is built into a brand new policy object so
     * the input policy stays untouched. */
    resultant_neethi_policy = neethi_policy_create(env);
    if(!resultant_neethi_policy)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        return NULL;
    }

    /* Carry the identifying attributes over to the result. */
    policy_name = neethi_policy_get_name(neethi_policy, env);
    if(policy_name)
    {
        neethi_policy_set_name(resultant_neethi_policy, env, policy_name);
    }
    policy_id = neethi_policy_get_id(neethi_policy, env);
    if(policy_id)
    {
        neethi_policy_set_id(resultant_neethi_policy, env, policy_id);
    }

    operator = neethi_operator_create(env);
    if(!operator)
    {
        /* Bug fix: the freshly created result policy used to leak here. */
        neethi_policy_free(resultant_neethi_policy, env);
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        return NULL;
    }
    neethi_operator_set_value(operator, env, neethi_policy, OPERATOR_TYPE_POLICY);

    /* Normalization always yields an ExactlyOne on success. */
    exactlyone = neethi_engine_normalize_operator(operator, registry, deep, env);

    /* Free only the wrapper; the wrapped input policy belongs to the
     * caller, so detach it before freeing. */
    neethi_operator_set_value_null(operator, env);
    neethi_operator_free(operator, env);
    operator = NULL;

    if(exactlyone)
    {
        /* The ExactlyOne becomes the single component of the result. */
        component = neethi_operator_create(env);
        if(!component)
        {
            /* Bug fix: avoid leaking both partial results on failure. */
            neethi_exactlyone_free(exactlyone, env);
            neethi_policy_free(resultant_neethi_policy, env);
            AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
            AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
            return NULL;
        }
        neethi_operator_set_value(component, env, exactlyone, OPERATOR_TYPE_EXACTLYONE);
        neethi_policy_add_operator(resultant_neethi_policy, env, component);
        return resultant_neethi_policy;
    }
    else
    {
        /* Bug fix: the result policy used to leak on normalization
         * failure. */
        neethi_policy_free(resultant_neethi_policy, env);
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_NORMALIZATION_FAILED, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] Normalization failed.");
        return NULL;
    }
}
/* Merges two (already normalized) policies.  The merged policy's
 * alternatives are the cross product of the two inputs' alternatives,
 * wrapped in a fresh Policy -> ExactlyOne tree.
 *
 * Returns the newly created merged policy, or NULL on failure (error
 * code set on env). */
AXIS2_EXTERN neethi_policy_t *AXIS2_CALL
neethi_engine_merge(
    const axutil_env_t *env,
    neethi_policy_t *neethi_policy1,
    neethi_policy_t *neethi_policy2)
{
    neethi_exactlyone_t *exactlyone1 = NULL;
    neethi_exactlyone_t *exactlyone2 = NULL;
    neethi_exactlyone_t *exactlyone = NULL;
    neethi_policy_t *neethi_policy = NULL;
    neethi_operator_t *component = NULL;

    /* A normalized policy exposes exactly one ExactlyOne component;
     * anything else is not valid input for a merge. */
    exactlyone1 = neethi_policy_get_exactlyone(neethi_policy1, env);
    exactlyone2 = neethi_policy_get_exactlyone(neethi_policy2, env);
    if(!exactlyone1 || !exactlyone2)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_WRONG_INPUT_FOR_MERGE, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] Wrong input for merge.");
        return NULL;
    }

    exactlyone = neethi_engine_get_cross_product(exactlyone1, exactlyone2, env);
    if(!exactlyone)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_CROSS_PRODUCT_FAILED, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] Cross product failed.");
        return NULL;
    }

    neethi_policy = neethi_policy_create(env);
    component = neethi_operator_create(env);
    if(!neethi_policy || !component)
    {
        /* Bug fix: on partial allocation failure the original leaked the
         * cross product and whichever of the two objects was created. */
        if(neethi_policy)
        {
            neethi_policy_free(neethi_policy, env);
        }
        if(component)
        {
            neethi_operator_free(component, env);
        }
        neethi_exactlyone_free(exactlyone, env);
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        return NULL;
    }

    neethi_operator_set_value(component, env, exactlyone, OPERATOR_TYPE_EXACTLYONE);
    neethi_policy_add_operator(neethi_policy, env, component);
    return neethi_policy;
}
/* Reports whether the operator wrapped by `operator` has no child
 * components.  Returns AXIS2_FALSE when the wrapper holds no value or
 * when the operator kind (unknown / reference) has no notion of
 * emptiness. */
static axis2_bool_t
neethi_engine_operator_is_empty(
    neethi_operator_t *operator,
    const axutil_env_t *env)
{
    void *value = neethi_operator_get_value(operator, env);

    if(!value)
    {
        return AXIS2_FALSE;
    }

    /* Delegate to the concrete type's own emptiness predicate. */
    switch(neethi_operator_get_type(operator, env))
    {
        case OPERATOR_TYPE_POLICY:
            return neethi_policy_is_empty((neethi_policy_t *)value, env);

        case OPERATOR_TYPE_ALL:
            return neethi_all_is_empty((neethi_all_t *)value, env);

        case OPERATOR_TYPE_EXACTLYONE:
            return neethi_exactlyone_is_empty((neethi_exactlyone_t *)value, env);

        case OPERATOR_TYPE_ASSERTION:
            return neethi_assertion_is_empty((neethi_assertion_t *)value, env);

        case OPERATOR_TYPE_UNKNOWN:
        case OPERATOR_TYPE_REFERENCE:
        default:
            return AXIS2_FALSE;
    }
}
/* Returns the child-component list of the operator wrapped by
 * `operator`, or NULL when the wrapper is empty or the operator kind
 * (unknown / reference) carries no component list. */
static axutil_array_list_t *
neethi_engine_operator_get_components(
    neethi_operator_t *operator,
    const axutil_env_t *env)
{
    void *value = neethi_operator_get_value(operator, env);

    if(!value)
    {
        return NULL;
    }

    /* Delegate to the concrete type's component accessor. */
    switch(neethi_operator_get_type(operator, env))
    {
        case OPERATOR_TYPE_POLICY:
            return neethi_policy_get_policy_components((neethi_policy_t *)value, env);

        case OPERATOR_TYPE_ALL:
            return neethi_all_get_policy_components((neethi_all_t *)value, env);

        case OPERATOR_TYPE_EXACTLYONE:
            return neethi_exactlyone_get_policy_components((neethi_exactlyone_t *)value, env);

        case OPERATOR_TYPE_ASSERTION:
            return neethi_assertion_get_policy_components((neethi_assertion_t *)value, env);

        case OPERATOR_TYPE_UNKNOWN:
        case OPERATOR_TYPE_REFERENCE:
        default:
            return NULL;
    }
}
/* Recursively normalizes the operator tree rooted at `operator` into a
 * single ExactlyOne (the canonical WS-Policy form).
 *
 * operator - wrapper around the Policy / All / ExactlyOne to normalize.
 * registry - resolves <wsp:PolicyReference> URIs; may be NULL, in which
 *            case any reference encountered fails the normalization.
 * deep     - deep (per-assertion) normalization; currently unsupported:
 *            an assertion child with deep set makes the call return NULL.
 * env      - axis2 environment.
 *
 * Returns the normalized ExactlyOne, or NULL on failure. */
static neethi_exactlyone_t *
neethi_engine_normalize_operator(
    neethi_operator_t *operator,
    neethi_registry_t *registry,
    axis2_bool_t deep,
    const axutil_env_t *env)
{
    axutil_array_list_t *child_component_list = NULL;
    neethi_operator_t *child_component = NULL;
    axutil_array_list_t *arraylist = NULL;
    int i = 0;
    neethi_operator_type_t type = neethi_operator_get_type(operator, env);
    if(neethi_engine_operator_is_empty(operator, env))
    {
        /* An empty operator normalizes to an empty ExactlyOne; for
         * non-ExactlyOne operators an empty All is nested inside so the
         * result still has the ExactlyOne -> All shape. */
        neethi_exactlyone_t *exactlyone = NULL;
        exactlyone = neethi_exactlyone_create(env);
        if(!exactlyone)
        {
            AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
            AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
            return NULL;
        }
        if(type != OPERATOR_TYPE_EXACTLYONE)
        {
            neethi_all_t *all = NULL;
            neethi_operator_t *component = NULL;
            all = neethi_all_create(env);
            component = neethi_operator_create(env);
            if(!all || !component)
            {
                AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
                return NULL;
            }
            neethi_operator_set_value(component, env, all, OPERATOR_TYPE_ALL);
            neethi_exactlyone_add_operator(exactlyone, env, component);
        }
        return exactlyone;
    }
    /* Each child is normalized into its own ExactlyOne, collected here
     * and finally folded into one by compute_resultant_component. */
    child_component_list = axutil_array_list_create(env, 0);
    arraylist = neethi_engine_operator_get_components(operator, env);
    /* NOTE(review): arraylist is not checked for NULL before
     * axutil_array_list_size below — presumably a non-empty operator
     * always has a component list; confirm. */
    for(i = 0; i < axutil_array_list_size(arraylist, env); i++)
    {
        neethi_operator_type_t component_type;
        child_component = (neethi_operator_t *)axutil_array_list_get(arraylist, env, i);
        component_type = neethi_operator_get_type(child_component, env);
        if(component_type == OPERATOR_TYPE_ASSERTION)
        {
            /* Assertion normalization: deep normalization of the
             * assertion itself is not implemented, so deep mode fails. */
            if(deep)
            {
                return NULL;
            }
            else
            {
                /* Shallow mode: wrap the assertion as
                 * ExactlyOne -> All -> assertion.  Note the child
                 * operator itself is shared into the new All. */
                neethi_exactlyone_t *exactlyone = NULL;
                neethi_all_t *all = NULL;
                neethi_operator_t *op = NULL;
                exactlyone = neethi_exactlyone_create(env);
                all = neethi_all_create(env);
                op = neethi_operator_create(env);
                if(!all || !op || !exactlyone)
                {
                    AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
                    AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
                    return NULL;
                }
                neethi_all_add_operator(all, env, child_component);
                neethi_operator_set_value(op, env, all, OPERATOR_TYPE_ALL);
                neethi_exactlyone_add_operator(exactlyone, env, op);
                axutil_array_list_add(child_component_list, env, exactlyone);
            }
        }
        else if(component_type == OPERATOR_TYPE_POLICY)
        {
            /* A nested Policy behaves like an All of its children, so
             * rewrap its components in an All and recurse. */
            neethi_policy_t *neethi_policy = NULL;
            neethi_all_t *all = NULL;
            axutil_array_list_t *children = NULL;
            neethi_operator_t *to_normalize = NULL;
            neethi_exactlyone_t *exactlyone = NULL;
            all = neethi_all_create(env);
            if(!all)
            {
                AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
                return NULL;
            }
            neethi_policy = (neethi_policy_t *)neethi_operator_get_value(child_component, env);
            if(neethi_policy)
            {
                children = neethi_policy_get_policy_components(neethi_policy, env);
                if(children)
                {
                    neethi_all_add_policy_components(all, children, env);
                    to_normalize = neethi_operator_create(env);
                    if(!to_normalize)
                    {
                        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
                        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
                        return NULL;
                    }
                    neethi_operator_set_value(to_normalize, env, all, OPERATOR_TYPE_ALL);
                    exactlyone
                        = neethi_engine_normalize_operator(to_normalize, registry, deep, env);
                    if(exactlyone)
                    {
                        axutil_array_list_add(child_component_list, env, exactlyone);
                    }
                }
                else
                {
                    AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_NO_CHILDREN_POLICY_COMPONENTS,
                        AXIS2_FAILURE);
                    AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
                        "[neethi] No children policy components");
                    return NULL;
                }
            }
        }
        else if(component_type == OPERATOR_TYPE_REFERENCE)
        {
            /* A policy reference is resolved through the registry, the
             * child operator is mutated in place to wrap the resolved
             * policy, and that policy is then normalized exactly like a
             * nested Policy above. */
            neethi_reference_t *policy_ref = NULL;
            axis2_char_t *uri = NULL;
            neethi_policy_t *policy = NULL;
            neethi_all_t *all = NULL;
            axutil_array_list_t *children = NULL;
            neethi_operator_t *to_normalize = NULL;
            neethi_exactlyone_t *exactlyone = NULL;
            policy_ref = (neethi_reference_t *)neethi_operator_get_value(child_component, env);
            uri = neethi_reference_get_uri(policy_ref, env);
            if(!uri)
            {
                AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_URI_NOT_SPECIFIED, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] Uri not specified");
                return NULL;
            }
            if(!registry)
            {
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
                    "[neethi] Cannot resolve the reference.Registry Not provided");
                return NULL;
            }
            policy = neethi_registry_lookup(registry, env, uri);
            if(!policy)
            {
                AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_NO_ENTRY_FOR_THE_GIVEN_URI,
                    AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] No entry for the given uri");
                return NULL;
            }
            /* Replace the reference with the resolved policy in place. */
            neethi_operator_set_value(child_component, env, policy, OPERATOR_TYPE_POLICY);
            all = neethi_all_create(env);
            if(!all)
            {
                AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
                return NULL;
            }
            policy = (neethi_policy_t *)neethi_operator_get_value(child_component, env);
            if(policy)
            {
                children = neethi_policy_get_policy_components(policy, env);
                if(children)
                {
                    neethi_all_add_policy_components(all, children, env);
                    to_normalize = neethi_operator_create(env);
                    if(!to_normalize)
                    {
                        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
                        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
                        return NULL;
                    }
                    neethi_operator_set_value(to_normalize, env, all, OPERATOR_TYPE_ALL);
                    exactlyone
                        = neethi_engine_normalize_operator(to_normalize, registry, deep, env);
                    if(exactlyone)
                    {
                        axutil_array_list_add(child_component_list, env, exactlyone);
                    }
                }
                else
                {
                    AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_NO_CHILDREN_POLICY_COMPONENTS,
                        AXIS2_FAILURE);
                    AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
                        "[neethi] No children policy components");
                    return NULL;
                }
            }
        }
        else
        {
            /* All / ExactlyOne children are normalized directly. */
            neethi_exactlyone_t *exactlyone = NULL;
            exactlyone = neethi_engine_normalize_operator(child_component, registry, deep, env);
            if(exactlyone)
            {
                axutil_array_list_add(child_component_list, env, exactlyone);
            }
        }
    }
    /* child_component_list now holds one ExactlyOne per child; fold them
     * into a single ExactlyOne appropriate for this operator's type.
     * compute_resultant_component also frees the list itself. */
    return neethi_engine_compute_resultant_component(child_component_list, type, env);
}
/* Folds a list of already-normalized ExactlyOne components into the
 * single ExactlyOne for a parent operator of the given type.
 *
 * - For an ExactlyOne parent, the children's alternatives are simply
 *   concatenated.
 * - For a Policy/All parent, the children's alternatives are combined
 *   with repeated cross products (logical AND of alternatives).
 *
 * Frees normalized_inner_components (the list object itself) before
 * returning; surviving elements are not freed. */
static neethi_exactlyone_t *
neethi_engine_compute_resultant_component(
    axutil_array_list_t * normalized_inner_components,
    neethi_operator_type_t type,
    const axutil_env_t * env)
{
    neethi_exactlyone_t *exactlyone = NULL;
    int size = 0;
    if(normalized_inner_components)
    {
        size = axutil_array_list_size(normalized_inner_components, env);
    }
    if(type == OPERATOR_TYPE_EXACTLYONE)
    {
        /* Concatenate: every alternative of every inner ExactlyOne goes
         * into one freshly created ExactlyOne. */
        int i = 0;
        neethi_exactlyone_t *inner_exactlyone = NULL;
        exactlyone = neethi_exactlyone_create(env);
        for(i = 0; i < size; i++)
        {
            inner_exactlyone = (neethi_exactlyone_t *)axutil_array_list_get(
                normalized_inner_components, env, i);
            if(inner_exactlyone)
            {
                neethi_exactlyone_add_policy_components(exactlyone,
                    neethi_exactlyone_get_policy_components(inner_exactlyone, env), env);
            }
            else
            {
                AXIS2_ERROR_SET(env->error,
                    AXIS2_ERROR_NEETHI_EXACTLYONE_NOT_FOUND_IN_NORMALIZED_POLICY, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
                    "[neethi] Exactlyone not found in normalized policy");
                return NULL;
            }
        }
    }
    else if(type == OPERATOR_TYPE_POLICY || type == OPERATOR_TYPE_ALL)
    {
        /* A single inner ExactlyOne means the operator is already
         * normalized and is returned as-is; otherwise the inner
         * ExactlyOnes are ANDed together via repeated cross products. */
        if(size > 1)
        {
            /* Start from the first component and cross-product the rest
             * into it one by one. */
            int i = 0;
            exactlyone = (neethi_exactlyone_t *)axutil_array_list_get(normalized_inner_components,
                env, 0);
            if(!exactlyone)
            {
                AXIS2_ERROR_SET(env->error,
                    AXIS2_ERROR_NEETHI_EXACTLYONE_NOT_FOUND_IN_NORMALIZED_POLICY, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
                    "[neethi] Exactlyone not found in normalized policy");
                return NULL;
            }
            if(!neethi_exactlyone_is_empty(exactlyone, env))
            {
                neethi_exactlyone_t *current_exactlyone = NULL;
                i = 1;
                for(i = 1; i < size; i++)
                {
                    current_exactlyone = (neethi_exactlyone_t *)axutil_array_list_get(
                        normalized_inner_components, env, i);
                    if(!current_exactlyone)
                    {
                        AXIS2_ERROR_SET(env->error,
                            AXIS2_ERROR_NEETHI_EXACTLYONE_NOT_FOUND_IN_NORMALIZED_POLICY,
                            AXIS2_FAILURE);
                        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
                            "[neethi] Exactlyone not found in normalized policy");
                        return NULL;
                    }
                    if(neethi_exactlyone_is_empty(current_exactlyone, env))
                    {
                        /* AND with an empty ExactlyOne (no satisfiable
                         * alternative) annihilates the product. */
                        exactlyone = current_exactlyone;
                        break;
                    }
                    else
                    {
                        /* The two operands are consumed: the product is
                         * a new ExactlyOne and both inputs are freed. */
                        neethi_exactlyone_t *temp = NULL;
                        neethi_exactlyone_t *temp1 = NULL;
                        temp = exactlyone;
                        temp1 = current_exactlyone;
                        exactlyone = neethi_engine_get_cross_product(exactlyone,
                            current_exactlyone, env);
                        neethi_exactlyone_free(temp, env);
                        neethi_exactlyone_free(temp1, env);
                        temp = NULL;
                        temp1 = NULL;
                    }
                }
            }
            else
            {
                AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_EXACTLYONE_IS_EMPTY, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] Exactlyone is Empty");
                return NULL;
            }
        }
        else
        {
            /* NOTE(review): when size is 0 this fetches index 0 of an
             * empty (or NULL) list — presumably callers never pass an
             * empty list for Policy/All; confirm axutil_array_list_get
             * returns NULL rather than faulting in that case. */
            exactlyone = (neethi_exactlyone_t *)axutil_array_list_get(normalized_inner_components,
                env, 0);
        }
    }
    /* Only the container list is released; its elements either moved
     * into the result or were freed during the cross products above. */
    axutil_array_list_free(normalized_inner_components, env);
    normalized_inner_components = NULL;
    return exactlyone;
}
/* Builds the cross product of two ExactlyOne operators: for every pair
 * (All_i from exactlyone1, All_j from exactlyone2) a new All containing
 * the components of both is created, and all such Alls are collected in
 * one new ExactlyOne.  This is how alternatives are ANDed during
 * normalization and merging.
 *
 * Both inputs must already be in normalized form (children are Alls).
 * Returns the new ExactlyOne, or NULL on failure (error set on env). */
static neethi_exactlyone_t *
neethi_engine_get_cross_product(
    neethi_exactlyone_t *exactlyone1,
    neethi_exactlyone_t *exactlyone2,
    const axutil_env_t *env)
{
    neethi_exactlyone_t *cross_product = NULL;
    neethi_all_t *cross_product_all = NULL;
    neethi_all_t *current_all1 = NULL;
    neethi_all_t *current_all2 = NULL;
    axutil_array_list_t *array_list1 = NULL;
    axutil_array_list_t *array_list2 = NULL;
    neethi_operator_t *component = NULL;
    int i = 0;
    int j = 0;
    cross_product = neethi_exactlyone_create(env);
    if(!cross_product)
    {
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
        return NULL;
    }
    array_list1 = neethi_exactlyone_get_policy_components(exactlyone1, env);
    array_list2 = neethi_exactlyone_get_policy_components(exactlyone2, env);
    if(!array_list1 || !array_list2)
    {
        /* NOTE(review): cross_product is leaked on this and the error
         * paths below — consider freeing it before returning NULL. */
        AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NEETHI_NO_CHILDREN_POLICY_COMPONENTS, AXIS2_FAILURE);
        AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "[neethi] No children policy components");
        return NULL;
    }
    /* Pairwise combination of every alternative from both sides. */
    for(i = 0; i < axutil_array_list_size(array_list1, env); i++)
    {
        current_all1 = (neethi_all_t *)neethi_operator_get_value(
            (neethi_operator_t *)axutil_array_list_get(array_list1, env, i), env);
        if(!current_all1)
        {
            AXIS2_ERROR_SET(env->error,
                AXIS2_ERROR_NEETHI_ALL_NOT_FOUND_WHILE_GETTING_CROSS_PRODUCT, AXIS2_FAILURE);
            AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
                "[neethi] All not found while getting cross product");
            return NULL;
        }
        for(j = 0; j < axutil_array_list_size(array_list2, env); j++)
        {
            current_all2 = (neethi_all_t *)neethi_operator_get_value(
                (neethi_operator_t *)axutil_array_list_get(array_list2, env, j), env);
            if(!current_all2)
            {
                AXIS2_ERROR_SET(env->error,
                    AXIS2_ERROR_NEETHI_ALL_NOT_FOUND_WHILE_GETTING_CROSS_PRODUCT, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI,
                    "[neethi] All not found while getting cross product");
                return NULL;
            }
            /* New alternative = components of All_i followed by the
             * components of All_j. */
            cross_product_all = neethi_all_create(env);
            if(!cross_product_all)
            {
                AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
                return NULL;
            }
            neethi_all_add_policy_components(cross_product_all, neethi_all_get_policy_components(
                current_all1, env), env);
            neethi_all_add_policy_components(cross_product_all, neethi_all_get_policy_components(
                current_all2, env), env);
            component = neethi_operator_create(env);
            if(!component)
            {
                AXIS2_ERROR_SET(env->error, AXIS2_ERROR_NO_MEMORY, AXIS2_FAILURE);
                AXIS2_LOG_ERROR(env->log, AXIS2_LOG_SI, "Out of memory");
                return NULL;
            }
            neethi_operator_set_value(component, env, cross_product_all, OPERATOR_TYPE_ALL);
            neethi_exactlyone_add_operator(cross_product, env, component);
        }
    }
    return cross_product;
}
/* These functions are for serializing a policy object. */

/* Serializes policy into an axiom (om) tree and returns its root node.
 * Delegates to neethi_policy_serialize with a NULL second argument
 * (presumably the parent node, so a fresh root is created — confirm
 * against neethi_policy_serialize). */
AXIS2_EXTERN axiom_node_t *AXIS2_CALL
neethi_engine_serialize(
    neethi_policy_t *policy,
    const axutil_env_t *env)
{
    return neethi_policy_serialize(policy, NULL, env);
}
/* Releases every axiom attribute stored in attr_hash, then the hash
 * itself.  axiom_element_extract_attributes hands back cloned
 * attributes, so the extractor's caller owns them and must dispose of
 * them through this helper. */
static void
neethi_engine_clear_element_attributes(
    axutil_hash_t *attr_hash,
    const axutil_env_t *env)
{
    axutil_hash_index_t *index = NULL;

    for(index = axutil_hash_first(attr_hash, env); index != NULL;
        index = axutil_hash_next(env, index))
    {
        void *entry = NULL;

        axutil_hash_this(index, NULL, NULL, &entry);
        if(entry != NULL)
        {
            axiom_attribute_free((axiom_attribute_t *)entry, env);
        }
    }

    axutil_hash_free(attr_hash, env);
}
|
Bhuvanesh1208/ruby2.6.1 | lib/ruby/2.6.0/rdoc/markup/formatter_test_case.rb | # frozen_string_literal: true
require 'minitest/unit'
##
# Test case for creating new RDoc::Markup formatters. See
# test/test_rdoc_markup_to_*.rb for examples.
#
# This test case adds a variety of tests to your subclass when
# #add_visitor_tests is called. Most tests set up a scenario then call a
# method you will provide to perform the assertion on the output.
#
# Your subclass must instantiate a visitor and assign it to <tt>@to</tt>.
#
# For example, test_accept_blank_line sets up a RDoc::Markup::BlockLine then
# calls accept_blank_line on your visitor. You are responsible for asserting
# that the output is correct.
#
# Example:
#
# class TestRDocMarkupToNewFormat < RDoc::Markup::FormatterTestCase
#
# add_visitor_tests
#
# def setup
# super
#
# @to = RDoc::Markup::ToNewFormat.new
# end
#
# def accept_blank_line
# assert_equal :junk, @to.res.join
# end
#
# # ...
#
# end
class RDoc::Markup::FormatterTestCase < RDoc::TestCase
##
# Call #setup when inheriting from this test case.
#
# Provides the following instance variables:
#
# +@m+::           RDoc::Markup.new
# +@RM+::          RDoc::Markup # to reduce typing
# +@bullet_list+:: @RM::List.new :BULLET, # ...
# +@label_list+::  @RM::List.new :LABEL, # ...
# +@lalpha_list+:: @RM::List.new :LALPHA, # ...
# +@note_list+::   @RM::List.new :NOTE, # ...
# +@number_list+:: @RM::List.new :NUMBER, # ...
# +@ualpha_list+:: @RM::List.new :UALPHA, # ...

def setup
  super

  @options = RDoc::Options.new

  @m = @RM.new

  # Two small builders keep the six fixture lists terse: +plain+ makes an
  # unlabeled two-item list ('l1', 'l2'); +labeled+ makes the cat/dog
  # labeled variant.
  plain = lambda do |type|
    @RM::List.new(type,
                  @RM::ListItem.new(nil, @RM::Paragraph.new('l1')),
                  @RM::ListItem.new(nil, @RM::Paragraph.new('l2')))
  end

  labeled = lambda do |type|
    @RM::List.new(type,
                  @RM::ListItem.new('cat', @RM::Paragraph.new('cats are cool')),
                  @RM::ListItem.new('dog', @RM::Paragraph.new('dogs are cool too')))
  end

  @bullet_list = plain[:BULLET]
  @label_list  = labeled[:LABEL]
  @lalpha_list = plain[:LALPHA]
  @note_list   = labeled[:NOTE]
  @number_list = plain[:NUMBER]
  @ualpha_list = plain[:UALPHA]
end
##
# Call to add the visitor tests to your test case
def self.add_visitor_tests
class_eval do
##
# Calls start_accepting which needs to verify startup state
def test_start_accepting
@to.start_accepting
start_accepting
end
##
# Calls end_accepting on your test case which needs to call
# <tt>@to.end_accepting</tt> and verify document generation
def test_end_accepting
@to.start_accepting
@to.res << 'hi'
end_accepting
end
##
# Calls accept_blank_line
def test_accept_blank_line
@to.start_accepting
@to.accept_blank_line @RM::BlankLine.new
accept_blank_line
end
##
# Calls accept_block_quote
def test_accept_block_quote
@to.start_accepting
@to.accept_block_quote block para 'quote'
accept_block_quote
end
##
# Test case that calls <tt>@to.accept_document</tt>
def test_accept_document
@to.start_accepting
@to.accept_document @RM::Document.new @RM::Paragraph.new 'hello'
accept_document
end
##
# Calls accept_heading with a level 5 RDoc::Markup::Heading
def test_accept_heading
@to.start_accepting
@to.accept_heading @RM::Heading.new(5, 'Hello')
accept_heading
end
##
# Calls accept_heading_1 with a level 1 RDoc::Markup::Heading
def test_accept_heading_1
@to.start_accepting
@to.accept_heading @RM::Heading.new(1, 'Hello')
accept_heading_1
end
##
# Calls accept_heading_2 with a level 2 RDoc::Markup::Heading
def test_accept_heading_2
@to.start_accepting
@to.accept_heading @RM::Heading.new(2, 'Hello')
accept_heading_2
end
##
# Calls accept_heading_3 with a level 3 RDoc::Markup::Heading
def test_accept_heading_3
@to.start_accepting
@to.accept_heading @RM::Heading.new(3, 'Hello')
accept_heading_3
end
##
# Calls accept_heading_4 with a level 4 RDoc::Markup::Heading
def test_accept_heading_4
@to.start_accepting
@to.accept_heading @RM::Heading.new(4, 'Hello')
accept_heading_4
end
##
# Calls accept_heading_b with a bold level 1 RDoc::Markup::Heading
def test_accept_heading_b
@to.start_accepting
@to.accept_heading @RM::Heading.new(1, '*Hello*')
accept_heading_b
end
##
# Calls accept_heading_suppressed_crossref with a level 1
# RDoc::Markup::Heading containing a suppressed crossref
def test_accept_heading_suppressed_crossref # HACK to_html_crossref test
@to.start_accepting
@to.accept_heading @RM::Heading.new(1, '\\Hello')
accept_heading_suppressed_crossref
end
##
# Drives the formatter with a plain Paragraph, then invokes the
# +accept_paragraph+ assertion hook (supplied by the including test case;
# these tests follow the template-method pattern).

def test_accept_paragraph
  @to.start_accepting
  @to.accept_paragraph @RM::Paragraph.new('hi')

  accept_paragraph
end

##
# Drives the formatter with a Paragraph containing <b>-tagged bold words,
# then invokes the +accept_paragraph_b+ hook.

def test_accept_paragraph_b
  @to.start_accepting
  @to.accept_paragraph @RM::Paragraph.new('reg <b>bold words</b> reg')

  accept_paragraph_b
end

##
# Drives the formatter with a Paragraph containing a \<br>, then invokes
# the +accept_paragraph_br+ hook.

def test_accept_paragraph_br
  @to.start_accepting
  @to.accept_paragraph para 'one<br>two'

  accept_paragraph_br
end

##
# Drives the formatter with a Paragraph containing a hard break, then
# invokes the +accept_paragraph_break+ hook.

def test_accept_paragraph_break
  @to.start_accepting
  @to.accept_paragraph para('hello', hard_break, 'world')

  accept_paragraph_break
end

##
# Drives the formatter with a Paragraph containing <em>-tagged emphasized
# words, then invokes the +accept_paragraph_i+ hook.

def test_accept_paragraph_i
  @to.start_accepting
  @to.accept_paragraph @RM::Paragraph.new('reg <em>italic words</em> reg')

  accept_paragraph_i
end

##
# Drives the formatter with a Paragraph containing +teletype+ markup, then
# invokes the +accept_paragraph_plus+ hook.

def test_accept_paragraph_plus
  @to.start_accepting
  @to.accept_paragraph @RM::Paragraph.new('reg +teletype+ reg')

  accept_paragraph_plus
end

##
# Drives the formatter with a Paragraph containing *bold* markup, then
# invokes the +accept_paragraph_star+ hook.

def test_accept_paragraph_star
  @to.start_accepting
  @to.accept_paragraph @RM::Paragraph.new('reg *bold* reg')

  accept_paragraph_star
end

##
# Drives the formatter with a Paragraph containing _italic_ markup, then
# invokes the +accept_paragraph_underscore+ hook.

def test_accept_paragraph_underscore
  @to.start_accepting
  @to.accept_paragraph @RM::Paragraph.new('reg _italic_ reg')

  accept_paragraph_underscore
end

##
# Drives the formatter with a two-line Verbatim block, then invokes the
# +accept_verbatim+ hook.

def test_accept_verbatim
  @to.start_accepting
  @to.accept_verbatim @RM::Verbatim.new("hi\n", " world\n")

  accept_verbatim
end

##
# Drives the formatter with a Raw node holding a literal HTML table, then
# invokes the +accept_raw+ hook.

def test_accept_raw
  @to.start_accepting
  @to.accept_raw @RM::Raw.new("<table>",
                              "<tr><th>Name<th>Count",
                              "<tr><td>a<td>1",
                              "<tr><td>b<td>2",
                              "</table>")

  accept_raw
end

##
# Drives the formatter with a Rule of weight 4, then invokes the
# +accept_rule+ hook.

def test_accept_rule
  @to.start_accepting
  @to.accept_rule @RM::Rule.new(4)

  accept_rule
end
##
# Opens a bullet list and its first item, then invokes the
# +accept_list_item_start_bullet+ assertion hook (defined by the
# including test case).

def test_accept_list_item_start_bullet
  @to.start_accepting

  @to.accept_list_start @bullet_list

  @to.accept_list_item_start @bullet_list.items.first

  accept_list_item_start_bullet
end

##
# Opens a label list and its first item, then invokes the
# +accept_list_item_start_label+ hook.

def test_accept_list_item_start_label
  @to.start_accepting

  @to.accept_list_start @label_list

  @to.accept_list_item_start @label_list.items.first

  accept_list_item_start_label
end

##
# Opens a lower-alpha list and its first item, then invokes the
# +accept_list_item_start_lalpha+ hook.

def test_accept_list_item_start_lalpha
  @to.start_accepting

  @to.accept_list_start @lalpha_list

  @to.accept_list_item_start @lalpha_list.items.first

  accept_list_item_start_lalpha
end

##
# Opens a NOTE list and its first item, then invokes the
# +accept_list_item_start_note+ hook.

def test_accept_list_item_start_note
  @to.start_accepting

  @to.accept_list_start @note_list

  @to.accept_list_item_start @note_list.items.first

  accept_list_item_start_note
end

##
# Runs a whole NOTE list whose label contains <tt> markup through the
# formatter, then invokes the +accept_list_item_start_note_2+ hook.

def test_accept_list_item_start_note_2
  list = list(:NOTE,
              item('<tt>teletype</tt>',
                   para('teletype description')))

  @to.start_accepting

  list.accept @to

  @to.end_accepting

  accept_list_item_start_note_2
end

##
# Runs a NOTE list with one labeled and one label-less item through the
# formatter, then invokes the
# +accept_list_item_start_note_multi_description+ hook.

def test_accept_list_item_start_note_multi_description
  list = list(:NOTE,
              item(%w[label],
                   para('description one')),
              item(nil, para('description two')))

  @to.start_accepting

  list.accept @to

  @to.end_accepting

  accept_list_item_start_note_multi_description
end

##
# Runs a NOTE list whose single item carries two labels through the
# formatter, then invokes the +accept_list_item_start_note_multi_label+
# hook.

def test_accept_list_item_start_note_multi_label
  list = list(:NOTE,
              item(%w[one two],
                   para('two headers')))

  @to.start_accepting

  list.accept @to

  @to.end_accepting

  accept_list_item_start_note_multi_label
end

##
# Opens a numbered list and its first item, then invokes the
# +accept_list_item_start_number+ hook.

def test_accept_list_item_start_number
  @to.start_accepting

  @to.accept_list_start @number_list

  @to.accept_list_item_start @number_list.items.first

  accept_list_item_start_number
end

##
# Opens an upper-alpha list and its first item, then invokes the
# +accept_list_item_start_ualpha+ hook.

def test_accept_list_item_start_ualpha
  @to.start_accepting

  @to.accept_list_start @ualpha_list

  @to.accept_list_item_start @ualpha_list.items.first

  accept_list_item_start_ualpha
end
##
# Opens then closes the first item of a bullet list, then invokes the
# +accept_list_item_end_bullet+ assertion hook (defined by the including
# test case).

def test_accept_list_item_end_bullet
  @to.start_accepting

  @to.accept_list_start @bullet_list

  @to.accept_list_item_start @bullet_list.items.first

  @to.accept_list_item_end @bullet_list.items.first

  accept_list_item_end_bullet
end

##
# Opens then closes the first item of a label list, then invokes the
# +accept_list_item_end_label+ hook.

def test_accept_list_item_end_label
  @to.start_accepting

  @to.accept_list_start @label_list

  @to.accept_list_item_start @label_list.items.first

  @to.accept_list_item_end @label_list.items.first

  accept_list_item_end_label
end

##
# Opens then closes the first item of a lower-alpha list, then invokes the
# +accept_list_item_end_lalpha+ hook.

def test_accept_list_item_end_lalpha
  @to.start_accepting

  @to.accept_list_start @lalpha_list

  @to.accept_list_item_start @lalpha_list.items.first

  @to.accept_list_item_end @lalpha_list.items.first

  accept_list_item_end_lalpha
end

##
# Opens then closes the first item of a NOTE list, then invokes the
# +accept_list_item_end_note+ hook.

def test_accept_list_item_end_note
  @to.start_accepting

  @to.accept_list_start @note_list

  @to.accept_list_item_start @note_list.items.first

  @to.accept_list_item_end @note_list.items.first

  accept_list_item_end_note
end

##
# Opens then closes the first item of a numbered list, then invokes the
# +accept_list_item_end_number+ hook.

def test_accept_list_item_end_number
  @to.start_accepting

  @to.accept_list_start @number_list

  @to.accept_list_item_start @number_list.items.first

  @to.accept_list_item_end @number_list.items.first

  accept_list_item_end_number
end

##
# Opens then closes the first item of an upper-alpha list, then invokes the
# +accept_list_item_end_ualpha+ hook.

def test_accept_list_item_end_ualpha
  @to.start_accepting

  @to.accept_list_start @ualpha_list

  @to.accept_list_item_start @ualpha_list.items.first

  @to.accept_list_item_end @ualpha_list.items.first

  accept_list_item_end_ualpha
end
##
# Opens a bullet list, then invokes the +accept_list_start_bullet+
# assertion hook (defined by the including test case).

def test_accept_list_start_bullet
  @to.start_accepting

  @to.accept_list_start @bullet_list

  accept_list_start_bullet
end

##
# Opens a label list, then invokes the +accept_list_start_label+ hook.

def test_accept_list_start_label
  @to.start_accepting

  @to.accept_list_start @label_list

  accept_list_start_label
end

##
# Opens a lower-alpha list, then invokes the +accept_list_start_lalpha+
# hook.

def test_accept_list_start_lalpha
  @to.start_accepting

  @to.accept_list_start @lalpha_list

  accept_list_start_lalpha
end

##
# Opens a NOTE list, then invokes the +accept_list_start_note+ hook.

def test_accept_list_start_note
  @to.start_accepting

  @to.accept_list_start @note_list

  accept_list_start_note
end

##
# Opens a numbered list, then invokes the +accept_list_start_number+ hook.

def test_accept_list_start_number
  @to.start_accepting

  @to.accept_list_start @number_list

  accept_list_start_number
end

##
# Opens an upper-alpha list, then invokes the +accept_list_start_ualpha+
# hook.

def test_accept_list_start_ualpha
  @to.start_accepting

  @to.accept_list_start @ualpha_list

  accept_list_start_ualpha
end
##
# Opens then closes a bullet list, then invokes the
# +accept_list_end_bullet+ assertion hook (defined by the including test
# case).

def test_accept_list_end_bullet
  @to.start_accepting

  @to.accept_list_start @bullet_list

  @to.accept_list_end @bullet_list

  accept_list_end_bullet
end

##
# Opens then closes a label list, then invokes the
# +accept_list_end_label+ hook.

def test_accept_list_end_label
  @to.start_accepting

  @to.accept_list_start @label_list

  @to.accept_list_end @label_list

  accept_list_end_label
end

##
# Opens then closes a lower-alpha list, then invokes the
# +accept_list_end_lalpha+ hook.

def test_accept_list_end_lalpha
  @to.start_accepting

  @to.accept_list_start @lalpha_list

  @to.accept_list_end @lalpha_list

  accept_list_end_lalpha
end

##
# Opens then closes a numbered list, then invokes the
# +accept_list_end_number+ hook.

def test_accept_list_end_number
  @to.start_accepting

  @to.accept_list_start @number_list

  @to.accept_list_end @number_list

  accept_list_end_number
end

##
# Opens then closes a NOTE list, then invokes the +accept_list_end_note+
# hook.

def test_accept_list_end_note
  @to.start_accepting

  @to.accept_list_start @note_list

  @to.accept_list_end @note_list

  accept_list_end_note
end

##
# Opens then closes an upper-alpha list, then invokes the
# +accept_list_end_ualpha+ hook.

def test_accept_list_end_ualpha
  @to.start_accepting

  @to.accept_list_start @ualpha_list

  @to.accept_list_end @ualpha_list

  accept_list_end_ualpha
end
##
# Runs a Document holding a two-level bullet list (item 'l1' contains a
# sub-list with 'l1.1') through the formatter, then invokes the
# +list_nested+ assertion hook.

def test_list_nested
  doc = @RM::Document.new(
          @RM::List.new(:BULLET,
            @RM::ListItem.new(nil,
              @RM::Paragraph.new('l1'),
              @RM::List.new(:BULLET,
                @RM::ListItem.new(nil,
                  @RM::Paragraph.new('l1.1')))),
            @RM::ListItem.new(nil,
              @RM::Paragraph.new('l2'))))

  doc.accept @to

  list_nested
end

##
# Runs a bullet list whose single item mixes a paragraph, a blank line and
# a multi-line verbatim block through the formatter, then invokes the
# +list_verbatim+ assertion hook.

def test_list_verbatim # HACK overblown
  doc =
    doc(
      list(:BULLET,
        item(nil,
          para('list stuff'),
          blank_line,
          verb("* list\n",
               " with\n",
               "\n",
               " second\n",
               "\n",
               " 1. indented\n",
               " 2. numbered\n",
               "\n",
               " third\n",
               "\n",
               "* second\n"))))

  doc.accept @to

  list_verbatim
end
end
end
end
|
db199310/kubeform | apis/azurerm/v1alpha1/storage_account_types.go | <filename>apis/azurerm/v1alpha1/storage_account_types.go
/*
Copyright The Kubeform Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by Kubeform. DO NOT EDIT.
package v1alpha1
import (
base "kubeform.dev/kubeform/apis/base/v1alpha1"
core "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// StorageAccount is the Kubeform CRD wrapping the Terraform
// azurerm_storage_account resource. Generated code — regenerate rather
// than hand-edit.
// +genclient
// +k8s:openapi-gen=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:object:root=true
// +kubebuilder:subresource:status
// +kubebuilder:printcolumn:name="Phase",type=string,JSONPath=`.status.phase`
type StorageAccount struct {
	metav1.TypeMeta   `json:",inline,omitempty"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec              StorageAccountSpec   `json:"spec,omitempty"`
	Status            StorageAccountStatus `json:"status,omitempty"`
}
// StorageAccountSpecBlobPropertiesDeleteRetentionPolicy mirrors the
// Terraform blob_properties.delete_retention_policy block.
type StorageAccountSpecBlobPropertiesDeleteRetentionPolicy struct {
	// +optional
	Days int64 `json:"days,omitempty" tf:"days,omitempty"`
}

// StorageAccountSpecBlobProperties mirrors the Terraform blob_properties block.
type StorageAccountSpecBlobProperties struct {
	// +optional
	// +kubebuilder:validation:MaxItems=1
	DeleteRetentionPolicy []StorageAccountSpecBlobPropertiesDeleteRetentionPolicy `json:"deleteRetentionPolicy,omitempty" tf:"delete_retention_policy,omitempty"`
}

// StorageAccountSpecCustomDomain mirrors the Terraform custom_domain block.
type StorageAccountSpecCustomDomain struct {
	Name string `json:"name" tf:"name"`
	// +optional
	UseSubdomain bool `json:"useSubdomain,omitempty" tf:"use_subdomain,omitempty"`
}

// StorageAccountSpecIdentity mirrors the Terraform identity block.
// PrincipalID and TenantID are computed by Azure, not user-supplied.
type StorageAccountSpecIdentity struct {
	// +optional
	PrincipalID string `json:"principalID,omitempty" tf:"principal_id,omitempty"`
	// +optional
	TenantID string `json:"tenantID,omitempty" tf:"tenant_id,omitempty"`
	Type     string `json:"type" tf:"type"`
}

// StorageAccountSpecNetworkRules mirrors the Terraform network_rules block.
type StorageAccountSpecNetworkRules struct {
	// +optional
	Bypass        []string `json:"bypass,omitempty" tf:"bypass,omitempty"`
	DefaultAction string   `json:"defaultAction" tf:"default_action"`
	// +optional
	IpRules []string `json:"ipRules,omitempty" tf:"ip_rules,omitempty"`
	// +optional
	VirtualNetworkSubnetIDS []string `json:"virtualNetworkSubnetIDS,omitempty" tf:"virtual_network_subnet_ids,omitempty"`
}

// StorageAccountSpecQueuePropertiesCorsRule mirrors the Terraform
// queue_properties.cors_rule block.
type StorageAccountSpecQueuePropertiesCorsRule struct {
	// +kubebuilder:validation:MaxItems=64
	AllowedHeaders []string `json:"allowedHeaders" tf:"allowed_headers"`
	// +kubebuilder:validation:MaxItems=64
	AllowedMethods []string `json:"allowedMethods" tf:"allowed_methods"`
	// +kubebuilder:validation:MaxItems=64
	AllowedOrigins []string `json:"allowedOrigins" tf:"allowed_origins"`
	// +kubebuilder:validation:MaxItems=64
	ExposedHeaders  []string `json:"exposedHeaders" tf:"exposed_headers"`
	MaxAgeInSeconds int64    `json:"maxAgeInSeconds" tf:"max_age_in_seconds"`
}

// StorageAccountSpecQueuePropertiesHourMetrics mirrors the Terraform
// queue_properties.hour_metrics block.
type StorageAccountSpecQueuePropertiesHourMetrics struct {
	Enabled bool `json:"enabled" tf:"enabled"`
	// +optional
	IncludeApis bool `json:"includeApis,omitempty" tf:"include_apis,omitempty"`
	// +optional
	RetentionPolicyDays int64  `json:"retentionPolicyDays,omitempty" tf:"retention_policy_days,omitempty"`
	Version             string `json:"version" tf:"version"`
}

// StorageAccountSpecQueuePropertiesLogging mirrors the Terraform
// queue_properties.logging block.
type StorageAccountSpecQueuePropertiesLogging struct {
	Delete bool `json:"delete" tf:"delete"`
	Read   bool `json:"read" tf:"read"`
	// +optional
	RetentionPolicyDays int64  `json:"retentionPolicyDays,omitempty" tf:"retention_policy_days,omitempty"`
	Version             string `json:"version" tf:"version"`
	Write               bool   `json:"write" tf:"write"`
}

// StorageAccountSpecQueuePropertiesMinuteMetrics mirrors the Terraform
// queue_properties.minute_metrics block.
type StorageAccountSpecQueuePropertiesMinuteMetrics struct {
	Enabled bool `json:"enabled" tf:"enabled"`
	// +optional
	IncludeApis bool `json:"includeApis,omitempty" tf:"include_apis,omitempty"`
	// +optional
	RetentionPolicyDays int64  `json:"retentionPolicyDays,omitempty" tf:"retention_policy_days,omitempty"`
	Version             string `json:"version" tf:"version"`
}

// StorageAccountSpecQueueProperties mirrors the Terraform queue_properties block.
type StorageAccountSpecQueueProperties struct {
	// +optional
	// +kubebuilder:validation:MaxItems=5
	CorsRule []StorageAccountSpecQueuePropertiesCorsRule `json:"corsRule,omitempty" tf:"cors_rule,omitempty"`
	// +optional
	// +kubebuilder:validation:MaxItems=1
	HourMetrics []StorageAccountSpecQueuePropertiesHourMetrics `json:"hourMetrics,omitempty" tf:"hour_metrics,omitempty"`
	// +optional
	// +kubebuilder:validation:MaxItems=1
	Logging []StorageAccountSpecQueuePropertiesLogging `json:"logging,omitempty" tf:"logging,omitempty"`
	// +optional
	// +kubebuilder:validation:MaxItems=1
	MinuteMetrics []StorageAccountSpecQueuePropertiesMinuteMetrics `json:"minuteMetrics,omitempty" tf:"minute_metrics,omitempty"`
}
// StorageAccountSpec mirrors the full Terraform azurerm_storage_account
// schema. Fields tagged tf:"-" are Kubeform wiring rather than Terraform
// arguments; fields with json:"-" and sensitive:"true" are computed
// secrets surfaced via SecretRef instead of the spec itself.
type StorageAccountSpec struct {
	// ProviderRef names the Secret holding provider credentials.
	ProviderRef core.LocalObjectReference `json:"providerRef" tf:"-"`

	ID string `json:"id,omitempty" tf:"id,omitempty"`

	// SecretRef is the destination for sensitive computed outputs
	// (access keys, connection strings).
	SecretRef *core.LocalObjectReference `json:"secretRef,omitempty" tf:"-"`

	// +optional
	AccessTier string `json:"accessTier,omitempty" tf:"access_tier,omitempty"`
	// +optional
	AccountEncryptionSource string `json:"accountEncryptionSource,omitempty" tf:"account_encryption_source,omitempty"`
	// +optional
	AccountKind            string `json:"accountKind,omitempty" tf:"account_kind,omitempty"`
	AccountReplicationType string `json:"accountReplicationType" tf:"account_replication_type"`
	AccountTier            string `json:"accountTier" tf:"account_tier"`
	// +optional
	// Deprecated
	AccountType string `json:"accountType,omitempty" tf:"account_type,omitempty"`
	// +optional
	// +kubebuilder:validation:MaxItems=1
	BlobProperties []StorageAccountSpecBlobProperties `json:"blobProperties,omitempty" tf:"blob_properties,omitempty"`
	// +optional
	// +kubebuilder:validation:MaxItems=1
	CustomDomain []StorageAccountSpecCustomDomain `json:"customDomain,omitempty" tf:"custom_domain,omitempty"`
	// +optional
	// Deprecated
	EnableAdvancedThreatProtection bool `json:"enableAdvancedThreatProtection,omitempty" tf:"enable_advanced_threat_protection,omitempty"`
	// +optional
	EnableBlobEncryption bool `json:"enableBlobEncryption,omitempty" tf:"enable_blob_encryption,omitempty"`
	// +optional
	EnableFileEncryption bool `json:"enableFileEncryption,omitempty" tf:"enable_file_encryption,omitempty"`
	// +optional
	EnableHTTPSTrafficOnly bool `json:"enableHTTPSTrafficOnly,omitempty" tf:"enable_https_traffic_only,omitempty"`
	// +optional
	// +kubebuilder:validation:MaxItems=1
	Identity []StorageAccountSpecIdentity `json:"identity,omitempty" tf:"identity,omitempty"`
	// +optional
	IsHnsEnabled bool   `json:"isHnsEnabled,omitempty" tf:"is_hns_enabled,omitempty"`
	Location     string `json:"location" tf:"location"`
	Name         string `json:"name" tf:"name"`
	// +optional
	// +kubebuilder:validation:MaxItems=1
	NetworkRules []StorageAccountSpecNetworkRules `json:"networkRules,omitempty" tf:"network_rules,omitempty"`

	// Computed primary-endpoint attributes follow; sensitive ones are
	// excluded from JSON and delivered through SecretRef.

	// +optional
	PrimaryAccessKey string `json:"-" sensitive:"true" tf:"primary_access_key,omitempty"`
	// +optional
	PrimaryBlobConnectionString string `json:"-" sensitive:"true" tf:"primary_blob_connection_string,omitempty"`
	// +optional
	PrimaryBlobEndpoint string `json:"primaryBlobEndpoint,omitempty" tf:"primary_blob_endpoint,omitempty"`
	// +optional
	PrimaryBlobHost string `json:"primaryBlobHost,omitempty" tf:"primary_blob_host,omitempty"`
	// +optional
	PrimaryConnectionString string `json:"-" sensitive:"true" tf:"primary_connection_string,omitempty"`
	// +optional
	PrimaryDfsEndpoint string `json:"primaryDfsEndpoint,omitempty" tf:"primary_dfs_endpoint,omitempty"`
	// +optional
	PrimaryDfsHost string `json:"primaryDfsHost,omitempty" tf:"primary_dfs_host,omitempty"`
	// +optional
	PrimaryFileEndpoint string `json:"primaryFileEndpoint,omitempty" tf:"primary_file_endpoint,omitempty"`
	// +optional
	PrimaryFileHost string `json:"primaryFileHost,omitempty" tf:"primary_file_host,omitempty"`
	// +optional
	PrimaryLocation string `json:"primaryLocation,omitempty" tf:"primary_location,omitempty"`
	// +optional
	PrimaryQueueEndpoint string `json:"primaryQueueEndpoint,omitempty" tf:"primary_queue_endpoint,omitempty"`
	// +optional
	PrimaryQueueHost string `json:"primaryQueueHost,omitempty" tf:"primary_queue_host,omitempty"`
	// +optional
	PrimaryTableEndpoint string `json:"primaryTableEndpoint,omitempty" tf:"primary_table_endpoint,omitempty"`
	// +optional
	PrimaryTableHost string `json:"primaryTableHost,omitempty" tf:"primary_table_host,omitempty"`
	// +optional
	PrimaryWebEndpoint string `json:"primaryWebEndpoint,omitempty" tf:"primary_web_endpoint,omitempty"`
	// +optional
	PrimaryWebHost string `json:"primaryWebHost,omitempty" tf:"primary_web_host,omitempty"`
	// +optional
	// +kubebuilder:validation:MaxItems=1
	QueueProperties   []StorageAccountSpecQueueProperties `json:"queueProperties,omitempty" tf:"queue_properties,omitempty"`
	ResourceGroupName string                              `json:"resourceGroupName" tf:"resource_group_name"`

	// Computed secondary-endpoint attributes (same layout as primary).

	// +optional
	SecondaryAccessKey string `json:"-" sensitive:"true" tf:"secondary_access_key,omitempty"`
	// +optional
	SecondaryBlobConnectionString string `json:"-" sensitive:"true" tf:"secondary_blob_connection_string,omitempty"`
	// +optional
	SecondaryBlobEndpoint string `json:"secondaryBlobEndpoint,omitempty" tf:"secondary_blob_endpoint,omitempty"`
	// +optional
	SecondaryBlobHost string `json:"secondaryBlobHost,omitempty" tf:"secondary_blob_host,omitempty"`
	// +optional
	SecondaryConnectionString string `json:"-" sensitive:"true" tf:"secondary_connection_string,omitempty"`
	// +optional
	SecondaryDfsEndpoint string `json:"secondaryDfsEndpoint,omitempty" tf:"secondary_dfs_endpoint,omitempty"`
	// +optional
	SecondaryDfsHost string `json:"secondaryDfsHost,omitempty" tf:"secondary_dfs_host,omitempty"`
	// +optional
	SecondaryFileEndpoint string `json:"secondaryFileEndpoint,omitempty" tf:"secondary_file_endpoint,omitempty"`
	// +optional
	SecondaryFileHost string `json:"secondaryFileHost,omitempty" tf:"secondary_file_host,omitempty"`
	// +optional
	SecondaryLocation string `json:"secondaryLocation,omitempty" tf:"secondary_location,omitempty"`
	// +optional
	SecondaryQueueEndpoint string `json:"secondaryQueueEndpoint,omitempty" tf:"secondary_queue_endpoint,omitempty"`
	// +optional
	SecondaryQueueHost string `json:"secondaryQueueHost,omitempty" tf:"secondary_queue_host,omitempty"`
	// +optional
	SecondaryTableEndpoint string `json:"secondaryTableEndpoint,omitempty" tf:"secondary_table_endpoint,omitempty"`
	// +optional
	SecondaryTableHost string `json:"secondaryTableHost,omitempty" tf:"secondary_table_host,omitempty"`
	// +optional
	SecondaryWebEndpoint string `json:"secondaryWebEndpoint,omitempty" tf:"secondary_web_endpoint,omitempty"`
	// +optional
	SecondaryWebHost string `json:"secondaryWebHost,omitempty" tf:"secondary_web_host,omitempty"`
	// +optional
	Tags map[string]string `json:"tags,omitempty" tf:"tags,omitempty"`
}
// StorageAccountStatus captures the observed state of a StorageAccount
// after reconciliation.
type StorageAccountStatus struct {
	// Resource generation, which is updated on mutation by the API Server.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty"`
	// Output holds the spec as materialized by Terraform, including
	// computed attributes.
	// +optional
	Output *StorageAccountSpec `json:"output,omitempty"`
	// +optional
	State *base.State `json:"state,omitempty"`
	// +optional
	Phase base.Phase `json:"phase,omitempty"`
	// TerraformErrors surfaces errors reported by the Terraform run.
	// +optional
	TerraformErrors []string `json:"terraformErrors,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:object:root=true

// StorageAccountList is a list of StorageAccounts
type StorageAccountList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	// Items is a list of StorageAccount CRD objects
	Items []StorageAccount `json:"items,omitempty"`
}
|
grahams58/athens | cmd/olympus/actions/app.go | <gh_stars>1-10
package actions
import (
"fmt"
stdlog "log"
"time"
"github.com/gobuffalo/buffalo"
"github.com/gobuffalo/buffalo/middleware"
"github.com/gobuffalo/buffalo/middleware/csrf"
"github.com/gobuffalo/buffalo/middleware/i18n"
"github.com/gobuffalo/buffalo/middleware/ssl"
"github.com/gobuffalo/buffalo/worker"
"github.com/gobuffalo/gocraft-work-adapter"
"github.com/gobuffalo/packr"
"github.com/gocraft/work"
"github.com/gomods/athens/pkg/config"
"github.com/gomods/athens/pkg/download"
"github.com/gomods/athens/pkg/eventlog"
"github.com/gomods/athens/pkg/log"
"github.com/gomods/athens/pkg/module"
"github.com/gomods/athens/pkg/stash"
"github.com/gomods/athens/pkg/storage"
"github.com/gomodule/redigo/redis"
"github.com/rs/cors"
"github.com/sirupsen/logrus"
"github.com/spf13/afero"
"github.com/unrolled/secure"
)
// workerConfig bundles everything needed to construct a background worker:
// the storage backend and event log the jobs operate on, the worker kind
// ("redis" or "memory"), the Redis endpoint (redis only), concurrency and
// retry limits, and the per-download timeout.
type workerConfig struct {
	store           storage.Backend
	eLog            eventlog.Eventlog
	wType           string
	redisEndpoint   string
	maxConc         int
	maxFails        uint
	downloadTimeout time.Duration
}

const (
	// OlympusWorkerName is the name of the Olympus worker
	OlympusWorkerName = "olympus-worker"

	// DownloadHandlerName is name of the handler downloading packages from VCS
	DownloadHandlerName = "download-handler"

	// PushNotificationHandlerName is the name of the handler processing push notifications
	PushNotificationHandlerName = "push-notification-worker"
)

// Job-payload keys and queue name shared with the worker handlers
// (referenced elsewhere in this package — none are used in this file).
var (
	workerQueue               = "default"
	workerModuleKey           = "module"
	workerVersionKey          = "version"
	workerPushNotificationKey = "push-notification"

	// T is buffalo Translator
	T *i18n.Translator
)

// Service is the name of the service that we want to tag our processes with
const Service = "olympus"
// App is where all routes and middleware for buffalo should be defined.
// This is the nerve center of your application.
//
// It wires storage, the Mongo-backed event log, a background worker,
// loggers, middleware, the Olympus HTTP routes, and the download protocol.
func App(conf *config.Config) (*buffalo.App, error) {
	// ENV is used to help switch settings based on where the
	// application is being run. Default is "development".
	ENV := conf.GoEnv

	// NOTE(review): GetStorage receives conf.Storage before the nil check
	// below — confirm GetStorage tolerates a nil Storage config.
	storage, err := GetStorage(conf.Olympus.StorageType, conf.Storage)
	if err != nil {
		return nil, err
	}

	if conf.Storage == nil || conf.Storage.Mongo == nil {
		return nil, fmt.Errorf("A valid Mongo configuration is required to create the event log")
	}
	eLog, err := GetEventLog(conf.Storage.Mongo.URL, conf.Storage.Mongo.CertPath, conf.Storage.Mongo.TimeoutDuration())
	if err != nil {
		return nil, fmt.Errorf("error creating eventlog (%s)", err)
	}

	// Assemble the background worker (redis-backed or in-memory).
	wConf := workerConfig{
		store:           storage,
		eLog:            eLog,
		wType:           conf.Olympus.WorkerType,
		maxConc:         conf.MaxConcurrency,
		maxFails:        conf.MaxWorkerFails,
		downloadTimeout: conf.TimeoutDuration(),
		redisEndpoint:   conf.Olympus.RedisQueueAddress,
	}
	w, err := getWorker(wConf)
	if err != nil {
		return nil, err
	}

	// Application logger and a separate (possibly quieter) buffalo logger.
	logLvl, err := logrus.ParseLevel(conf.LogLevel)
	if err != nil {
		return nil, err
	}
	lggr := log.New(conf.CloudRuntime, logLvl)

	bLogLvl, err := logrus.ParseLevel(conf.BuffaloLogLevel)
	if err != nil {
		return nil, err
	}
	blggr := log.Buffalo(bLogLvl)

	// NOTE(review): Host is derived from conf.Proxy.Port while Addr uses
	// conf.Olympus.Port — confirm this cross-reference is intentional.
	app := buffalo.New(buffalo.Options{
		Addr: conf.Olympus.Port,
		Host: "http://127.0.0.1" + conf.Proxy.Port,
		Env:  ENV,
		PreWares: []buffalo.PreWare{
			cors.Default().Handler,
		},
		SessionName: "_olympus_session",
		Worker:      w,
		WorkerOff:   true, // TODO(marwan): turned off until worker is being used.
		Logger:      blggr,
	})

	// Automatically redirect to SSL
	app.Use(ssl.ForceSSL(secure.Options{
		SSLRedirect:     ENV == "production",
		SSLProxyHeaders: map[string]string{"X-Forwarded-Proto": "https"},
	}))

	if ENV == "development" {
		app.Use(middleware.ParameterLogger)
	}

	// Protect against CSRF attacks. https://www.owasp.org/index.php/Cross-Site_Request_Forgery_(CSRF)
	// Remove to disable this.
	if conf.EnableCSRFProtection {
		csrfMiddleware := csrf.New
		app.Use(csrfMiddleware)
	}

	// Setup and use translations:
	if T, err = i18n.New(packr.NewBox("../locales"), "en-US"); err != nil {
		app.Stop(err)
	}
	app.Use(T.Middleware())

	// Olympus-specific routes.
	app.GET("/diff/{lastID}", diffHandler(storage, eLog))
	app.GET("/feed/{lastID}", feedHandler(storage))
	app.GET("/eventlog/{sequence_id}", eventlogHandler(eLog))
	app.POST("/cachemiss", cachemissHandler(w))
	app.POST("/push", pushNotificationHandler(w))
	app.GET("/healthz", healthHandler)

	// Download Protocol: fetch/list/stash wiring for the go module proxy API.
	goBin := conf.GoBinary
	fs := afero.NewOsFs()
	mf, err := module.NewGoGetFetcher(goBin, fs)
	if err != nil {
		return nil, err
	}
	lister := download.NewVCSLister(goBin, fs)
	st := stash.New(mf, storage)
	dpOpts := &download.Opts{
		Storage: storage,
		Stasher: st,
		Lister:  lister,
	}
	dp := download.New(dpOpts)
	handlerOpts := &download.HandlerOpts{Protocol: dp, Logger: lggr, Engine: renderEng}
	download.RegisterHandlers(app, handlerOpts)

	app.ServeFiles("/", assetsBox) // serve files from the public directory

	return app, nil
}
// getWorker builds the background worker selected by wConf.wType
// ("redis" or "memory"). Any other value logs a notice and falls back
// to the in-memory worker.
func getWorker(wConf workerConfig) (worker.Worker, error) {
	if wConf.wType == "redis" {
		return registerRedis(wConf)
	}
	if wConf.wType != "memory" {
		stdlog.Printf("Provided background worker type %s. Expected redis|memory. Defaulting to memory", wConf.wType)
	}
	return registerInMem(wConf)
}
// registerInMem returns an in-process worker with the push-notification
// handler registered on it.
func registerInMem(wConf workerConfig) (worker.Worker, error) {
	job := GetProcessPushNotificationJob(wConf.store, wConf.eLog, wConf.downloadTimeout)
	w := worker.NewSimple()
	if err := w.Register(PushNotificationHandlerName, job); err != nil {
		return nil, err
	}
	return w, nil
}
// registerRedis returns a gocraft/work-backed worker wired to the Redis
// endpoint in wConf, with the push-notification handler registered using
// SkipDead/MaxFails job options.
func registerRedis(wConf workerConfig) (worker.Worker, error) {
	pool := &redis.Pool{
		MaxActive: 5,
		MaxIdle:   5,
		Wait:      true,
		Dial: func() (redis.Conn, error) {
			return redis.Dial("tcp", wConf.redisEndpoint)
		},
	}

	w := gwa.New(gwa.Options{
		Pool:           pool,
		Name:           OlympusWorkerName,
		MaxConcurrency: wConf.maxConc,
	})

	job := GetProcessPushNotificationJob(wConf.store, wConf.eLog, wConf.downloadTimeout)
	err := w.RegisterWithOptions(
		PushNotificationHandlerName,
		work.JobOptions{SkipDead: true, MaxFails: wConf.maxFails},
		job,
	)
	return w, err
}
|
xKuZz/trabajosugr | 2/ED/practica5 _ Arbol AVL/avl.hxx | <reponame>xKuZz/trabajosugr<filename>2/ED/practica5 _ Arbol AVL/avl.hxx
#include "avl.h" // AUTOCOMPLETAR
#define AVL_TEMPLATE template <typename T, class comparar>
/** @file avl.hxx
* @brief Implementación del Árbol AVL a partir de bintree.
* @author <NAME>.
*/
/*----------------------- CONSIDERACIONES INICIALES -------------------------*/
/** Modificaciones a bintree proporcionado
* A la estructura de datos proporcionada se le han hecho las siguientes
* modificaciones:
*
* bintree::inorder_iterator
* -He añadido el operador de predecremento (--it).
* -He añadido un puntero a un bintree para poder hacer el decremento.
* -inorder_iterator ahora es amigo de AVL.
* -El constructor de copia que estaba declarado pero no implementado ahora
* es implementado por el compilador.
*
* bintree::node
* -node es ahora amigo de AVL
*
* bintree
* -bintree es ahora amigo de AVL.
* -swap: Añadido método swap.
* -begin_inoder: modificado para poner el puntero a bintree (necesario --).
* -end_inorder: modificado para poner el puntero a bintree (necesario --).
*
* bintreeNode.hxx:
* -Eliminados multiples ; después de implementaciones de métodos que no
* permitián compilar.
*/
/** Algunas abreviaciones utilizadas
* Con la idea de facilitar la lectura del código se utilizan la siguientes
* abreviaciones basadas en macro y alias frecuentemente:
* Basados en macros:
* AVL_TEMPLATE equivale a template <typename T, class comparar>
* get<AVL_Dato>(*n) Devuelve referencia al valor de la etiqueta del nodo.
* get<AVL_Altura(*n) Devuelve referencia a la altura de la etiqueta del nodo.
* Basados en alias:
* avl_tree_t equivale a bintree<pair<T, altura>>
* avl_node_t equivale a typename avl_tree_t::node
*/
/*----------------------- CONSTRUCTORES-ASIGNACIÓN --------------------------*/
/** Métodos para la construcción y asignación de AVL
* Los constructores por defecto y de copia y el operador de asignación
* son creados automáticamente por el compilador
*
* Los constructores de rango y lista de inicialización son los aquí creados
* y llaman a sus correspondientes versiones de insert
*/
// Range constructor: builds the AVL by inserting every element of
// [first, last) via the range overload of insert.
AVL_TEMPLATE
template <typename InputIterator>
AVL<T, comparar>::AVL(InputIterator first, InputIterator last) {
  insert(first,last);
}
// Initializer-list constructor: delegates to the initializer-list
// overload of insert, which inserts each element in order.
AVL_TEMPLATE
AVL<T, comparar>::AVL(initializer_list<T> il) {
  insert(il);
}
/*-------------------------------- ALTURA -----------------------------------*/
/** Métodos privados relacionados con el tratamiento de la altura
* Existen 3 métodos
* Un método h que devuelve la altura del nodo o -1 si el nodo es nulo.
* Un método que actualiza la altura tras insertar mientras sea pertinente.
* Un método que actualiza la altura tras borrar mientras sea pertinente.
* El método utilizado tras borrar es también se usa para las rotaciones.
*/
// Height of the subtree rooted at n; an empty (null) node has height -1,
// so leaves come out at height 0.
AVL_TEMPLATE
typename AVL<T,comparar>::altura AVL<T,comparar>::h(avl_node_t n) {
  return n.elnodo == nullptr ? -1 : get<AVL_Altura>(*n);
}
// Recomputes stored heights walking upward from n toward the root.
// Stops early as soon as a node's stored height is already correct,
// since every ancestor above it must then be correct as well.
AVL_TEMPLATE
void AVL<T, comparar>::ajustar_altura(avl_node_t n) {
  avl_node_t actual = n;
  while (!actual.null()) {
    auto esperada = max(h(actual.elnodo->izda), h(actual.elnodo->dcha)) + 1;
    if (get<AVL_Altura>(*actual) == esperada)
      return;  // heights above are already consistent
    get<AVL_Altura>(*actual) = esperada;
    actual = actual.parent();
  }
}
/*--------------------------ROTACIONES AVL ----------------------------------*/
/** Métodos privados para comprobar y realizar rotaciones AVL.
* El método balancear recibe la raíz de un subárbol y comprueba si es
* necesario balancear. En caso de que no sea necesario devuelve false.
*
* El método rotar realiza las rotaciones dependiendo del tipo.
*
* Las rotaciones ajustan todos los punteros implicados de padres e hijos.
*
* Las rotaciones dobles están implementadas en función de las simples.
*/
// Checks the AVL balance condition at node n and rotates if violated.
// Returns true when the caller can stop rebalancing: either a rotation
// was performed here, or n is the root (parent is null). Returns false
// only when n is balanced and has a parent, so the caller keeps walking
// upward. Note the first `if` deliberately has no braces; the inner
// if/else binds to it (dangling-else is resolved as written).
AVL_TEMPLATE
bool AVL<T, comparar>::balancear(avl_node_t n) {
  // Case A: left subtree is taller than the right
  if (h(n.elnodo->izda) - h(n.elnodo->dcha) >= 2)
    // Case A1: single (right) rotation
    if (h(n.elnodo->izda.elnodo->izda) >= h(n.elnodo->izda.elnodo->dcha))
      rotacion(n, ROTAR::DERECHA);
    // Case A2: zig-zag (left-right) rotation
    else
      rotacion(n, ROTAR::IZQUIERDA_DERECHA);
  // Case B: right subtree is taller than the left
  else if (h(n.elnodo->dcha) - h(n.elnodo->izda) >= 2) {
    // Case B1: single (left) rotation
    if (h(n.elnodo->dcha.elnodo->dcha) >= h(n.elnodo->dcha.elnodo->izda))
      rotacion(n, ROTAR::IZQUIERDA);
    // Case B2: zig-zag (right-left) rotation
    else
      rotacion(n, ROTAR::DERECHA_IZQUIERDA);
  }
  else if (!n.parent().null())
    return false;
  return true;
}
// Performs the requested AVL rotation rooted at n, rewiring parent and
// child pointers on both sides, updating el_avl.laraiz when the rotation
// happens at the root, and recomputing the two affected heights before
// propagating upward with ajustar_altura. The double rotations are
// expressed as two single rotations.
AVL_TEMPLATE
void AVL<T, comparar>::rotacion(avl_node_t n, ROTAR r) {
  avl_node_t aux;
  switch (r) {
  case ROTAR::IZQUIERDA:
    // Save the left child of the right child (if any); it will become
    // the right child of the demoted node.
    if (!n.right().null()) {
      aux = n.elnodo->dcha.elnodo->izda;
      if (!aux.null())
        aux.elnodo->pad = n;
    }
    // Prepare the right child to become the new subtree root.
    n.elnodo->dcha.elnodo->pad = n.elnodo->pad;
    n.elnodo->dcha.elnodo->izda = n;
    // Rotate.
    n = n.elnodo->dcha;
    // Repoint the parent (or the tree root) at the new subtree root.
    if (n.parent().null())
      el_avl.laraiz = n;
    else if (n.elnodo->pad.elnodo->izda == n.elnodo->izda &&
             !n.elnodo->izda.null())
      n.elnodo->pad.elnodo->izda = n;
    else if (n.elnodo->pad.elnodo->dcha == n.elnodo->izda &&
             !n.elnodo->dcha.null())
      n.elnodo->pad.elnodo->dcha = n;
    // Re-link the demoted node (reattaching the saved subtree).
    n.elnodo->izda.elnodo->pad = n;
    n.elnodo->izda.elnodo->dcha = aux;
    // Recompute the two heights changed by the rotation, then propagate.
    get<AVL_Altura>(*n.left()) = max(h(n.left().left()),
                                     h(n.left().right())) + 1;
    get<AVL_Altura>(*n) = max(h(n.left()),
                              h(n.right())) + 1;
    ajustar_altura(n.elnodo->pad);
    break;
  case ROTAR::DERECHA:
    // Mirror image of IZQUIERDA: save the right child of the left child.
    if (!n.left().null()) {
      aux = n.elnodo->izda.elnodo->dcha;
      if (!aux.null())
        aux.elnodo->pad = n;
    }
    // Prepare the left child to become the new subtree root.
    n.elnodo->izda.elnodo->pad = n.elnodo->pad;
    n.elnodo->izda.elnodo->dcha = n;
    // Rotate.
    n = n.elnodo->izda;
    // Repoint the parent (or the tree root) at the new subtree root.
    if (n.parent().null())
      el_avl.laraiz = n;
    else if (n.elnodo->pad.elnodo->izda == n.elnodo->dcha &&
             !n.elnodo->izda.null())
      n.elnodo->pad.elnodo->izda = n;
    else if (n.elnodo->pad.elnodo->dcha == n.elnodo->dcha &&
             !n.elnodo->dcha.null())
      n.elnodo->pad.elnodo->dcha = n;
    // Re-link the demoted node (reattaching the saved subtree).
    n.elnodo->dcha.elnodo->pad = n;
    n.elnodo->dcha.elnodo->izda = aux;
    // Recompute the two heights changed by the rotation, then propagate.
    get<AVL_Altura>(*n.right()) = max(h(n.right().left()),
                                      h(n.right().right())) + 1;
    get<AVL_Altura>(*n) = max(h(n.left()),
                              h(n.right())) + 1;
    ajustar_altura(n.elnodo->pad);
    break;
  case ROTAR::DERECHA_IZQUIERDA:
    // Double rotation: right on the right child, then left on n.
    rotacion(n.elnodo->dcha, ROTAR::DERECHA);
    rotacion(n           , ROTAR::IZQUIERDA);
    break;
  case ROTAR::IZQUIERDA_DERECHA:
    // Double rotation: left on the left child, then right on n.
    rotacion(n.elnodo->izda, ROTAR::IZQUIERDA);
    rotacion(n           , ROTAR::DERECHA);
    break;
  }
}
/*------------------------- AVL: INSERTAR DATOS -----------------------------*/
/** Métodos para la inserción de datos en el árbol.
* La lógica requerida para la implementación es manejada en la versión que
* recibe un valor como parámetro, el resto de implementaciones se basan en
* la primera.
*
* El algoritmo implementado baraja los siguientes casos:
* 1- El árbol está vacío.
* 2- El dato no se encuentra en el árbol.
* 3- El árbol se encuentra en el árbol.
* Si un dato ha sido insertado se reajusta su altura y se comprueba el
* equilibrio de la estructura parando o bien cuándo se produce una rotación
* o bien si se llega a la raíz sin rotar.
*/
// Inserts val into the tree. Returns a pair of an iterator to the
// inserted (or already-present) element and a bool that is true only
// when a new node was created. After a successful insertion the stored
// heights are fixed upward and the balance condition is re-checked from
// the new node toward the root, stopping at the first rotation (a single
// rotation restores AVL balance after insert) or at the root.
AVL_TEMPLATE
pair<typename AVL<T, comparar>::iterator, bool> AVL<T, comparar>::insert
                                                               (const T& val) {
  auto nodo = el_avl.laraiz;
  bool insertado = false;

  if (nodo.null()) { // Empty tree -> insert as the root
    el_avl = avl_tree_t({val, 0});
    return { iterator(el_avl.laraiz), true};
  }
  else
    // Standard BST descent guided by the comparator.
    while (!insertado) {
      if (cmp(val, get<AVL_Dato>(*nodo))) { // CMP: LESS -> go left
        if (nodo.left().null()) {
          el_avl.insert_left (nodo, {val, 0});
          insertado = true;
        }
        nodo = nodo.left();
      }
      else if (cmp(get<AVL_Dato>(*nodo), val)) { // CMP: GREATER -> go right
        if (nodo.right().null()) {
          el_avl.insert_right(nodo, {val,0});
          insertado = true;
        }
        nodo = nodo.right();
      }
      else // CMP: EQUAL -> already present, nothing inserted
        return { iterator(nodo), false };
    }

  ++tama;
  ajustar_altura(nodo.parent());
  // Walk upward until a rotation happens or the root reports balanced.
  for (auto aux = nodo; !balancear(aux) ;aux = aux.parent()) {}
  return { iterator(nodo), true };
}
AVL_TEMPLATE
template <typename InputIterator>
void AVL<T, comparar>::insert(InputIterator first, InputIterator last) {
    // Insert every element of [first, last).
    // BUG FIX: std::for_each hands the *dereferenced* element (a value of
    // the iterator's value_type) to the callback, not the iterator itself;
    // the old lambda declared an InputIterator parameter and dereferenced
    // it again, which is wrong for any real iterator type.
    for (; first != last; ++first)
        insert(*first);
}
AVL_TEMPLATE
void AVL<T, comparar>::insert(initializer_list<T> il) {
    // Insert every element of the braced list. Iterate by const reference:
    // the old loop copied each element before handing it to
    // insert(const T&), which takes a reference anyway.
    for (const T& valor : il)
        insert(valor);
}
/*------------------------- AVL: BORRAR DATOS -------------------------------*/
/** Métodos para la eliminación de elementos del árbol
* La lógica es manejada en la versión 1 que recibe un iterador constante y
* devuelve un iterador.
* El resto de métodos dependen del primero.
* Además existe un método privado en la sección del cálculo de alturas
* que permite actualizar las alturas tras la eliminación de un dato.
* El algoritmo se divide en 4 casos:
* Caso 1: Iterador inválido
* Caso 2: El elemento a borrar es una hoja (Raíz | No Raíz)
* Caso 3: El elemento a borrar tiene sólo un hijo
* Caso 4: El elemento a borrar tiene dos hijos. (Llamada recursiva).
* Una vez el algoritmo borra el elemento se actualizan las alturas y se
* comprueba la condición de equilibrio subiendo hasta la raíz.
*/
AVL_TEMPLATE
typename AVL<T, comparar>::iterator AVL<T, comparar>::erase(
const_iterator position) {
    // Erase the element at 'position' and return an iterator to its in-order
    // successor. Cases: invalid iterator, leaf, single child, two children
    // (solved by a recursive call on the in-order predecessor).
    if (position == cend()) return end(); // CASE 1: INVALID ITERATOR
    else {
        avl_node_t nodo_padre = position.iter.elnodo.elnodo->pad;
        avl_node_t nodo = position.iter.elnodo;
        avl_node_t aux;
        avl_tree_t tree_aux;
        iterator salida;
        // Capture the successor before the node is unlinked.
        salida.iter = ++position.iter;
        if (nodo.left().null() && nodo.right().null()) { // CASE 2: LEAF
            if (nodo_padre.null()) // Parent: root
                el_avl.clear();
            else { // Parent: not the root
                if (nodo_padre.left() == nodo) // I am the left child
                    el_avl.prune_left (nodo_padre, tree_aux);
                else // I am the right child
                    el_avl.prune_right(nodo_padre, tree_aux);
                tree_aux.clear(); // Free the pruned node
            }
            // Heights/rotations are fixed starting from the parent
            --tama;
            aux = nodo_padre;
        }
        else if (!nodo.left().null() != !nodo.right().null()) { // CASE 3: ONE CHILD
            // Detach the only subtree...
            if (!nodo.left().null()) // Only a left child
                el_avl.prune_left (nodo, tree_aux);
            else // Only a right child
                el_avl.prune_right(nodo, tree_aux);
            // ...and graft it where the erased node hung.
            if (nodo_padre.null()) { // Parent: root
                el_avl.swap(tree_aux);
                tree_aux.clear();
                aux = el_avl.laraiz;
            } // Parent: not the root
            else if (nodo_padre.left() == nodo) { // I am the left child
                el_avl.insert_left (nodo_padre, tree_aux);
                aux = nodo_padre.left();
            }
            else {
                el_avl.insert_right(nodo_padre, tree_aux);
                aux = nodo_padre.right();
            }
            --tama;
            // NOTE(review): this unconditionally overwrites the 'aux' values
            // assigned in the three branches above (including el_avl.laraiz
            // for the root case, where nodo_padre is null) — those earlier
            // assignments are dead. Confirm which node the height/rebalance
            // pass below is meant to start from.
            aux = nodo_padre;
        }
        else { // CASE 4: TWO CHILDREN
            avl_node_t nodo_a_borrar;
            // In-order predecessor: rightmost node of the left subtree.
            avl_node_t anterior = nodo.elnodo->izda;
            while (!anterior.right().null())
                anterior = anterior.elnodo->dcha;
            auto valor_anterior(*anterior);
            // Recursive implementation: erase the predecessor (which has at
            // most one child), then overwrite this node's payload with the
            // predecessor's saved value.
            nodo_a_borrar = nodo;
            erase(get<AVL_Dato>(*anterior));
            *nodo_a_borrar = valor_anterior;
            aux = nodo_a_borrar;
        }
        // Recompute heights and re-check the balance up to the root.
        ajustar_altura(aux);
        for (; !aux.null(); aux = aux.parent())
            balancear(aux);
        return salida;
    }
}
AVL_TEMPLATE
typename AVL<T, comparar>::size_type AVL<T, comparar>::erase(
const T& val) {
    // Erase the element equivalent to 'val'; returns how many were removed
    // (0 or 1, since the tree holds no duplicates).
    const_iterator pos = find(val);
    if (pos == cend())
        return 0;
    erase(pos);
    return 1;
}
AVL_TEMPLATE
typename AVL<T, comparar>::iterator AVL<T, comparar>::erase(
const_iterator first, const_iterator last) {
    // Erase [first, last) one element at a time; erase(const_iterator)
    // always hands back the next valid position.
    const_iterator actual = first;
    for (; actual != last; actual = erase(actual)) {}
    return actual;
}
/*------------------------- CAPACIDAD, SWAP, CLEAR --------------------------*/
/** Métodos para consultar capacidad, intercambio y limpieza del AVL.
*/
AVL_TEMPLATE
bool AVL<T, comparar>::empty() const {
    // True when the tree holds no elements.
    return size() == 0;
}
AVL_TEMPLATE
typename AVL<T, comparar>::size_type AVL<T, comparar>::size() const {
    // Number of stored elements; maintained by insert()/erase().
    return tama;
}
AVL_TEMPLATE
void AVL<T, comparar>::swap(AVL& x) {
    // Exchange the contents of the two trees.
    el_avl.swap(x.el_avl);
    // BUG FIX: the element counter must travel with its tree, otherwise
    // both containers report a wrong size() after the swap.
    size_type t = tama;
    tama = x.tama;
    x.tama = t;
}
AVL_TEMPLATE
void AVL<T, comparar>::clear() noexcept {
    // Release every node of the tree.
    el_avl.clear();
    // BUG FIX: reset the element counter; size()/empty() kept reporting the
    // old count after clear().
    tama = 0;
}
/*--------------------- AVL: OPERACIONES DE BÚSQUEDA ------------------------*/
/** Métodos para realización de búsquedas O (log n).
* Todos los métodos están implementados en función del functor de orden cmp
* Existen versiones para iterator y const_iterator
*/
AVL_TEMPLATE
typename AVL<T, comparar>::iterator
AVL<T, comparar>::lower_bound(const T& val) {
    // First element not ordered before 'val'; the null node when absent.
    avl_node_t candidato, nodo = el_avl.laraiz;
    while (!nodo.null()) {
        if (!cmp(get<AVL_Dato>(*nodo), val)) {
            candidato = nodo;
            nodo = nodo.left();
        } else {
            nodo = nodo.right();
        }
    }
    auto it = iterator(candidato);
    it.iter.ptr = &el_avl;
    return it;
}

AVL_TEMPLATE
typename AVL<T, comparar>::iterator
AVL<T, comparar>::upper_bound(const T& val) {
    // First element ordered strictly after 'val'; the null node when absent.
    avl_node_t candidato, nodo = el_avl.laraiz;
    while (!nodo.null()) {
        if (cmp(val, get<AVL_Dato>(*nodo))) {
            candidato = nodo;
            nodo = nodo.left();
        } else {
            nodo = nodo.right();
        }
    }
    auto it = iterator(candidato);
    it.iter.ptr = &el_avl;
    return it;
}

AVL_TEMPLATE
typename AVL<T, comparar>::iterator
AVL<T, comparar>::find(const T& val) {
    // 'val' is present iff lower_bound lands on an equivalent element.
    iterator it = lower_bound(val);
    if (it == end() || cmp(val, *it))
        return end();
    return it;
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator
AVL<T, comparar>::lower_bound(const T& val) const {
    // Const counterpart of lower_bound(const T&).
    avl_node_t candidato, nodo = el_avl.laraiz;
    while (!nodo.null()) {
        if (!cmp(get<AVL_Dato>(*nodo), val)) {
            candidato = nodo;
            nodo = nodo.left();
        } else {
            nodo = nodo.right();
        }
    }
    auto c_it = const_iterator(candidato);
    c_it.iter.ptr = &el_avl;
    return c_it;
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator
AVL<T, comparar>::upper_bound(const T& val) const {
    // Const counterpart of upper_bound(const T&).
    avl_node_t candidato, nodo = el_avl.laraiz;
    while (!nodo.null()) {
        if (cmp(val, get<AVL_Dato>(*nodo))) {
            candidato = nodo;
            nodo = nodo.left();
        } else {
            nodo = nodo.right();
        }
    }
    auto c_it = const_iterator(candidato);
    c_it.iter.ptr = &el_avl;
    return c_it;
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator
AVL<T, comparar>::find(const T& val) const {
    // 'val' is present iff lower_bound lands on an equivalent element.
    const_iterator c_it = lower_bound(val);
    if (c_it == cend() || cmp(val, *c_it))
        return cend();
    return c_it;
}
/*------------------------ AVL: MÉTODOS DE RANGO ----------------------------*/
/** Métodos para el inicio y el final del rango del AVL.
* Se sigue la filosofía [begin(), end())
* Para los iteradores no reversos:
* begin(), cbegin() apunta al primer dato del árbol (si hay) o al nodo nulo
* end(), cend() apunta al nodo nulo
* Para los iteradores reversos:
* rbegin(), crbegin() equivale a --end(), --cend() respectivamente.
* rend(), crend() equivale a --begin(), --cbegin() respectivamente.
*/
AVL_TEMPLATE
typename AVL<T, comparar>::iterator AVL<T, comparar>::begin() {
    // First element of the in-order traversal (null node when empty).
    iterator out;
    out.iter = el_avl.begin_inorder();
    return out;
}

AVL_TEMPLATE
typename AVL<T, comparar>::iterator AVL<T, comparar>::end() {
    // Past-the-end position: the null node.
    iterator out;
    out.iter = el_avl.end_inorder();
    return out;
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator AVL<T, comparar>::cbegin() {
    // Const counterpart of begin().
    const_iterator out;
    out.iter = el_avl.begin_inorder();
    return out;
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator AVL<T, comparar>::cend() {
    // Const counterpart of end().
    const_iterator out;
    out.iter = el_avl.end_inorder();
    return out;
}

AVL_TEMPLATE
typename AVL<T, comparar>::reverse_iterator AVL<T, comparar>::rbegin() {
    // The reverse range starts just before end().
    return reverse_iterator(end());
}

AVL_TEMPLATE
typename AVL<T, comparar>::reverse_iterator AVL<T, comparar>::rend() {
    // The reverse range finishes just before begin().
    return reverse_iterator(begin());
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_reverse_iterator AVL<T, comparar>::crbegin() {
    // Const counterpart of rbegin().
    return const_reverse_iterator(cend());
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_reverse_iterator AVL<T, comparar>::crend() {
    // Const counterpart of rend().
    return const_reverse_iterator(cbegin());
}
/*----------------------------- AVL: ITERATOR -------------------------------*/
/** Implementación del iterador estándar.
* El iterador se comporta como un iterador bidireccional.
* El iterador se basa en el funcionamiento del inorder_iterator de bintree.
*/
AVL_TEMPLATE
AVL<T, comparar>::iterator::iterator(avl_node_t& n) : iter(n) {}

AVL_TEMPLATE
typename AVL<T, comparar>::iterator
AVL<T, comparar>::iterator::operator++(int) {
    // Post-increment: advance, then hand back the previous position.
    iterator previo = *this;
    ++iter;
    return previo;
}

AVL_TEMPLATE
typename AVL<T, comparar>::iterator&
AVL<T, comparar>::iterator::operator++() {
    // Pre-increment: step to the in-order successor.
    ++iter;
    return *this;
}

AVL_TEMPLATE
typename AVL<T, comparar>::iterator
AVL<T, comparar>::iterator::operator--(int) {
    // Post-decrement: step back, then hand back the previous position.
    iterator previo = *this;
    --iter;
    return previo;
}

AVL_TEMPLATE
typename AVL<T, comparar>::iterator&
AVL<T, comparar>::iterator::operator--() {
    // Pre-decrement: step to the in-order predecessor.
    --iter;
    return *this;
}

AVL_TEMPLATE
T& AVL<T, comparar>::iterator::operator*() {
    // Expose only the stored datum (the height field stays hidden).
    return get<AVL_Dato>(*iter);
}

AVL_TEMPLATE
T* AVL<T, comparar>::iterator::operator->() {
    // Pointer to the stored datum.
    return addressof(get<AVL_Dato>(*iter));
}

AVL_TEMPLATE
bool AVL<T, comparar>::iterator::operator==(const iterator& it) const {
    // Equal when both wrap the same position of the same tree.
    return iter == it.iter;
}

AVL_TEMPLATE
bool AVL<T, comparar>::iterator::operator!=(const iterator& it) const {
    return iter != it.iter;
}
/*----------------------------- AVL: CONST_ITERATOR -------------------------*/
/** Implementación del iterador constante.
* El iterador se comporta como un iterador bidireccional.
* El iterador se basa en el funcionamiento del inorder_iterator de bintree.
*/
AVL_TEMPLATE
AVL<T, comparar>::const_iterator::const_iterator(const iterator &it)
: iter(it.iter) {}

AVL_TEMPLATE
AVL<T, comparar>::const_iterator::const_iterator(avl_node_t &n) : iter(n) {}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator
AVL<T, comparar>::const_iterator::operator++(int) {
    // Post-increment: advance, then hand back the previous position.
    const_iterator previo = *this;
    ++iter;
    return previo;
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator&
AVL<T, comparar>::const_iterator::operator++() {
    // Pre-increment: step to the in-order successor.
    ++iter;
    return *this;
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator
AVL<T, comparar>::const_iterator::operator--(int) {
    // Post-decrement: step back, then hand back the previous position.
    const_iterator previo = *this;
    --iter;
    return previo;
}

AVL_TEMPLATE
typename AVL<T, comparar>::const_iterator&
AVL<T, comparar>::const_iterator::operator--() {
    // Pre-decrement: step to the in-order predecessor.
    --iter;
    return *this;
}

AVL_TEMPLATE
const T& AVL<T, comparar>::const_iterator::operator*() const {
    // The underlying inorder_iterator only offers non-const access, so
    // constness is cast away locally; the datum is returned as const.
    auto it = const_cast<typename avl_tree_t::inorder_iterator&>(iter);
    return get<AVL_Dato>(*it);
}

AVL_TEMPLATE
const T* AVL<T, comparar>::const_iterator::operator->() const {
    // Pointer to the stored datum (same const_cast rationale as operator*).
    auto it = const_cast<typename avl_tree_t::inorder_iterator&>(iter);
    return addressof(get<AVL_Dato>(*it));
}

AVL_TEMPLATE
bool AVL<T, comparar>::const_iterator::operator==(const const_iterator& it)
const {
    // Equal when both wrap the same position of the same tree.
    return iter == it.iter;
}

AVL_TEMPLATE
bool AVL<T, comparar>::const_iterator::operator!=(const const_iterator& it)
const {
    return iter != it.iter;
}
|
mobius-software-ltd/iotbroker.cloud-cpp-client | iot-protocols/amqp/classes/tlv/variable/amqptlvvariable.cpp | /**
* Mobius Software LTD
* Copyright 2015-2018, Mobius Software LTD
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
#include "amqptlvvariable.h"
AMQPTLVVariable::AMQPTLVVariable(AMQPType *type, ByteArray value) : TLVAMQP(new AMQPSimpleConstructor(type))
{
    // Variable-width TLV: payloads up to 255 bytes use a 1-byte length
    // prefix, larger payloads use a 4-byte prefix.
    this->value = value;
    if (this->value.getSize() > 255) {
        this->width = 4;
    } else {
        this->width = 1;
    }
}
ByteArray AMQPTLVVariable::getData()
{
    // Wire layout: constructor bytes, then the length prefix, then payload.
    ByteArray sizeField = ByteArray();
    if (this->width == 1) {
        sizeField.writeChar(this->value.getSize());
    } else if (this->width == 4) {
        sizeField.writeInt(this->value.getSize());
    }
    ByteArray data = ByteArray();
    data.writeRawData(this->constructor->getData().getByteArray());
    data.writeRawData(sizeField.getByteArray());
    if (this->value.getSize() > 0) {
        data.writeRawData(this->value.getByteArray());
    }
    return data;
}
int AMQPTLVVariable::getLength()
{
    // Total encoded size: constructor + length prefix + payload.
    return this->constructor->getLength() + this->width + this->value.getSize();
}
ByteArray AMQPTLVVariable::getValue()
{
    // Raw payload bytes, without the constructor or the length prefix.
    return this->value;
}
QString AMQPTLVVariable::description()
{
    // Human-readable form: the payload bytes interpreted as text.
    return QString(this->value.getByteArray());
}
|
Galland/kivy | examples/demo/multistroke/helpers.py | <reponame>Galland/kivy<gh_stars>1000+
__all__ = ('InformationPopup', )
from kivy.uix.popup import Popup
from kivy.properties import StringProperty
from kivy.factory import Factory
from kivy.lang import Builder
from kivy.clock import Clock
# kv rule: the popup auto-dismisses via the trigger armed in on_open.
Builder.load_string('''
<InformationPopup>:
    auto_dismiss: True
    size_hint: None, None
    size: 400, 200
    on_open: root.dismiss_trigger()
    title: root.title
    Label:
        text: root.text
''')


class InformationPopup(Popup):
    """Popup that shows a message and dismisses itself automatically.

    The popup closes ``time`` seconds after it is opened: the ``on_open``
    rule in the kv string fires ``dismiss_trigger``, a one-shot clock
    trigger created in ``__init__``.
    """

    # Title shown in the popup header.
    title = StringProperty('Information')

    # Message displayed in the popup body.
    text = StringProperty('')

    def __init__(self, time=1.5, **kwargs):
        super(InformationPopup, self).__init__(**kwargs)
        # One-shot trigger that calls self.dismiss after `time` seconds.
        self.dismiss_trigger = Clock.create_trigger(self.dismiss, time)


Factory.register('InformationPopup', cls=InformationPopup)
|
YiqunPeng/leetcode_pro | solutions/750_number_of_corner_rectangles.py | <gh_stars>0
class Solution:
    def countCornerRectangles(self, grid: List[List[int]]) -> int:
        """Count axis-aligned rectangles whose four corners are 1-cells.

        For every pair of rows, each pair of columns in which both rows
        hold a 1 yields one rectangle, i.e. C(shared, 2) per row pair.
        Runs in O(R^2 * C) for R rows and C columns.
        """
        # Robustness fix: the original read len(grid[0]) unconditionally and
        # raised IndexError on an empty grid; an empty grid has no rectangles.
        if not grid:
            return 0
        res = 0
        seen = []  # column-index sets of the rows processed so far
        for row in grid:
            ones = {c for c, cell in enumerate(row) if cell == 1}
            for prev in seen:
                shared = len(ones & prev)
                res += shared * (shared - 1) // 2
            seen.append(ones)
        return res
|
kiereleaseuser/optaplanner | optaplanner-core/src/main/java/org/optaplanner/core/impl/heuristic/selector/move/decorator/CachingMoveSelector.java | /*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.heuristic.selector.move.decorator;
import java.util.Iterator;
import org.optaplanner.core.impl.heuristic.selector.common.SelectionCacheType;
import org.optaplanner.core.impl.heuristic.selector.common.iterator.CachedListRandomIterator;
import org.optaplanner.core.impl.heuristic.selector.entity.decorator.CachingEntitySelector;
import org.optaplanner.core.impl.heuristic.selector.move.MoveSelector;
import org.optaplanner.core.impl.heuristic.selector.value.decorator.CachingValueSelector;
import org.optaplanner.core.impl.move.Move;
/**
* A {@link MoveSelector} that caches the result of its child {@link MoveSelector}.
* <p/>
* Keep this code in sync with {@link CachingEntitySelector} and {@link CachingValueSelector}.
*/
public class CachingMoveSelector extends AbstractCachingMoveSelector {

    protected final boolean randomSelection;

    public CachingMoveSelector(MoveSelector childMoveSelector, SelectionCacheType cacheType, boolean randomSelection) {
        super(childMoveSelector, cacheType);
        this.randomSelection = randomSelection;
    }

    // ************************************************************************
    // Worker methods
    // ************************************************************************

    /**
     * Random selection walks the cached list through a never-ending
     * {@link CachedListRandomIterator}; original-order selection is finite.
     */
    public boolean isNeverEnding() {
        return randomSelection;
    }

    public Iterator<Move> iterator() {
        if (randomSelection) {
            return new CachedListRandomIterator<Move>(cachedMoveList, workingRandom);
        }
        return cachedMoveList.iterator();
    }

    @Override
    public String toString() {
        return "Caching(" + childMoveSelector + ")";
    }

}
|
gdgib/gearbox | gb-command/src/main/java/com/g2forge/gearbox/command/converter/IMethodArgument.java | <filename>gb-command/src/main/java/com/g2forge/gearbox/command/converter/IMethodArgument.java
package com.g2forge.gearbox.command.converter;
import java.lang.reflect.Type;
import com.g2forge.habitat.metadata.value.subject.ISubject;
/**
 * A single argument of a converted command method, exposing its value,
 * reflective type information and attached metadata.
 *
 * @param <T> the declared type of the argument.
 */
public interface IMethodArgument<T> {
	/** @return the value supplied for this argument. */
	public T get();

	/** @return the generic (possibly parameterized) type of the argument. */
	public Type getGenericType();

	/** @return the metadata subject describing this argument. */
	public ISubject getMetadata();

	/** @return the name of the argument. */
	public String getName();

	/** @return the raw class of the argument. */
	public Class<T> getType();
}
apache/sis | application/sis-javafx/src/main/java/org/apache/sis/gui/SystemMonitor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.gui;
import java.util.Locale;
import java.util.function.UnaryOperator;
import javafx.event.EventHandler;
import javafx.event.EventType;
import javafx.stage.WindowEvent;
import javafx.stage.Stage;
import javafx.scene.Scene;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import org.apache.sis.gui.dataset.LogViewer;
import org.apache.sis.internal.gui.Resources;
import org.apache.sis.internal.gui.DataStoreOpener;
import org.apache.sis.internal.gui.io.FileAccessView;
import org.apache.sis.internal.storage.io.ChannelFactory;
import org.apache.sis.util.resources.Vocabulary;
/**
* Shows the "System monitor" window.
*
* @author <NAME> (Geomatys)
* @version 1.2
* @since 1.2
* @module
*/
final class SystemMonitor implements EventHandler<WindowEvent> {
    /**
     * The provider of wrappers around channels used for reading data.
     * Those wrappers are used for listening to file accesses.
     *
     * @see DataStoreOpener#setFactoryWrapper(UnaryOperator)
     */
    private final UnaryOperator<ChannelFactory> listener;

    /**
     * Creates new event handler.
     */
    private SystemMonitor(final UnaryOperator<ChannelFactory> listener) {
        this.listener = listener;
    }

    /**
     * Invoked when the system monitor window is shown or hidden:
     * starts or stops listening to read events on channels.
     */
    @Override
    public void handle(final WindowEvent event) {
        final EventType<WindowEvent> type = event.getEventType();
        if (WindowEvent.WINDOW_SHOWN.equals(type)) {
            DataStoreOpener.setFactoryWrapper(listener);
        } else if (WindowEvent.WINDOW_HIDDEN.equals(type)) {
            DataStoreOpener.setFactoryWrapper(null);
        }
    }

    /**
     * Creates the system monitor window.
     *
     * @param parent the parent window.
     * @param locale the locale, or {@code null} for default.
     */
    static Stage create(final Stage parent, final Locale locale) {
        final Resources resources = Resources.forLocale(locale);
        final Vocabulary vocabulary = Vocabulary.getResources(locale);
        final FileAccessView files = new FileAccessView(resources, vocabulary);
        final LogViewer logging = new LogViewer();
        logging.systemLogs.set(true);
        /*
         * The two tabs: file accesses and system logs. Neither can be closed.
         */
        final Tab fileTab = new Tab(resources.getString(Resources.Keys.FileAccesses), files.getView());
        final Tab logTab = new Tab(vocabulary.getString(Vocabulary.Keys.Logs), logging.getView());
        fileTab.setClosable(false);
        logTab.setClosable(false);
        final TabPane panes = new TabPane(fileTab, logTab);
        /*
         * The window itself. The minimal height prevents the logging details
         * from hiding the logging message table.
         */
        final Stage window = new Stage();
        window.setTitle(resources.getString(Resources.Keys.SystemMonitor) + " — Apache SIS");
        window.getIcons().setAll(parent.getIcons());
        window.setScene(new Scene(panes));
        window.setMinWidth(400);
        window.setMinHeight(500);
        window.setWidth(800);
        window.setHeight(600);
        /*
         * One handler reacts to both show and hide events, and the monitor
         * follows the lifetime of its parent window.
         */
        final SystemMonitor handler = new SystemMonitor(files);
        window.setOnShown(handler);
        window.setOnHidden(handler);
        parent.setOnHidden((e) -> window.hide());
        return window;
    }
}
|
damianCrow/React-dashboard | src/components/atoms/ClockHand/index.stories.js | import React from 'react'
import { storiesOf } from '@storybook/react'
import ClockHand from '.'
// Storybook stories exercising the ClockHand component's props.
storiesOf('ClockHand', module)
  .add('default', () => (
    <ClockHand />
  ))
  .add('reverse', () => (
    <ClockHand reverse />
  ))
  .add('height', () => (
    <ClockHand height={100} />
  ))
  // NOTE(review): the stories below pass form-input style props ('invalid',
  // 'type', <option> children) that look copy-pasted from an Input
  // component's stories — confirm ClockHand actually supports them.
  .add('invalid', () => (
    <ClockHand invalid />
  ))
  .add('type textarea', () => (
    <ClockHand type="textarea" />
  ))
  .add('type checkbox', () => (
    <ClockHand type="checkbox" />
  ))
  .add('type radio', () => (
    <ClockHand type="radio" />
  ))
  .add('type select', () => (
    <ClockHand type="select">
      <option>Option 1</option>
      <option>Option 2</option>
      <option>Option 3</option>
    </ClockHand>
  ))
|
Gravitational-Field/Dive-In-Java | 03_JavaWeb/code/16_json_ajax_i18n/src/com/lzj/json/PersonListType.java | package com.lzj.json;
import com.google.gson.reflect.TypeToken;
import com.lzj.pojo.Person;
import java.util.ArrayList;
/**
 * @ClassName PersonListType
 * @Description: Concrete {@link TypeToken} that captures the generic type
 *               {@code ArrayList<Person>}, so Gson can deserialize a JSON
 *               array into a correctly-typed person list despite erasure.
 * @Author Keen
 * @DATE 2021/1/5 15:00
 * @Version 1.0
 **/
public class PersonListType extends TypeToken<ArrayList<Person>> {
}
|
MasterOogwayis/spring | thinking-in-spring/annotation/src/main/java/com/demo/spring/annotation/EvenProfileConditional.java | package com.demo.spring.annotation;
import org.springframework.context.annotation.Condition;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.core.env.Environment;
import org.springframework.core.env.Profiles;
import org.springframework.core.type.AnnotatedTypeMetadata;
/**
* @author ZhangShaowei on 2021/11/16 9:35
*/
public class EvenProfileConditional implements Condition {

    /**
     * Matches only while the "two" Spring profile is active.
     */
    @Override
    public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
        return context.getEnvironment().acceptsProfiles(Profiles.of("two"));
    }
}
|
msmygit/nosqlbench | driver-web/src/main/java/io/nosqlbench/driver/webdriver/WebDriverCmdState.java | <filename>driver-web/src/main/java/io/nosqlbench/driver/webdriver/WebDriverCmdState.java<gh_stars>100-1000
package io.nosqlbench.driver.webdriver;
public class WebDriverCmdState {

    // The action whose state this object records.
    private final WebDriverAction action;

    // Presumably the workload cycle the action belongs to — confirm with callers.
    private final long cycle;

    // NOTE(review): both fields are stored but never exposed within this
    // class; confirm whether accessors are intended or the fields are kept
    // for future use.
    public WebDriverCmdState(WebDriverAction action, long cycle) {
        this.action = action;
        this.cycle = cycle;
    }
}
|
profmikegreene/HAXcms | build/es5-amd/node_modules/@vaadin/vaadin-text-field/vaadin-number-field.js | define(["./theme/lumo/vaadin-number-field.js"], function (_vaadinNumberField) {
"use strict";
}); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.