repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
miksa1987/Beast.Mode
beast.mode/src/components/post/MobileViewpost.js
import React from 'react' import { Input, Image, Icon } from 'semantic-ui-react' import { Link } from 'react-router-dom' import Comments from './Comments' import LikeButton from '../universal/LikeButton' import './MobileViewpost.css' const MobileViewpost = (props) => { if(!props.post.content) { return ( <div></div> ) } return ( <div className='viewpost-element'> <table> <tbody> <tr> <td> <div className='div-style'> {props.post.user.picture && props.post.user.picture !== '' ? <Image width='32px' height='32px' circular src={props.post.user.picture} /> : <Icon name='user' /> } </div> </td> <td> <div className='div-style'> <strong><Link to={`/profile/${props.post.user.id}`}>{props.post.user.username}</Link></strong> </div> </td> {props.post.type === 'doneworkout' ? <td><div className='div-style'><p>{`did a workout`}</p></div></td> : null} </tr> </tbody> </table> <p>{props.post.content}</p> <img src={props.post.picture} width='100%' alt='pic' /> <Comments comments={props.post.comments} showAll={true} postid={props.post._id} /> <table><tbody> <tr> <td> <LikeButton like={props.like} likes={props.post.likes.length} id={props.post._id} type={props.post.type} /> </td> <td width='100%'> <form onSubmit={props.sendComment}> <Input fluid size='small' icon={{ name: 'comment' }} {...props.comment} placeholder='Comment' /> </form> </td> </tr> </tbody></table> </div> ) } export default MobileViewpost
cjellick/rancher
pkg/controllers/user/logging/deployer/upgradeimpl.go
package deployer

import (
	"fmt"
	"reflect"
	"strings"
	"time"

	versionutil "github.com/rancher/rancher/pkg/catalog/utils"
	"github.com/rancher/rancher/pkg/controllers/user/helm/common"
	loggingconfig "github.com/rancher/rancher/pkg/controllers/user/logging/config"
	"github.com/rancher/rancher/pkg/project"
	appsv1 "github.com/rancher/types/apis/apps/v1"
	v1 "github.com/rancher/types/apis/core/v1"
	v3 "github.com/rancher/types/apis/management.cattle.io/v3"
	projectv3 "github.com/rancher/types/apis/project.cattle.io/v3"
	"github.com/rancher/types/config"

	"github.com/pkg/errors"
	"github.com/rancher/types/namespace"
	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/labels"
)

var (
	ServiceName             = "logging"
	waitCatalogSyncInterval = 60 * time.Second
)

// LoggingService upgrades the rancher-logging app in a user cluster to the
// latest catalog template version, removing pre-chart legacy resources first.
type LoggingService struct {
	clusterName    string
	clusterLister  v3.ClusterLister
	catalogLister  v3.CatalogLister
	projectLister  v3.ProjectLister
	templateLister v3.CatalogTemplateLister
	daemonsets     appsv1.DaemonSetInterface
	secrets        v1.SecretInterface
	appDeployer    *AppDeployer
}

// NewService returns an uninitialized LoggingService; call Init before use.
func NewService() *LoggingService {
	return &LoggingService{}
}

// Init wires the service to the user cluster's listers and clients.
func (l *LoggingService) Init(cluster *config.UserContext) {
	ad := &AppDeployer{
		AppsGetter: cluster.Management.Project,
		AppsLister: cluster.Management.Project.Apps("").Controller().Lister(),
		Namespaces: cluster.Core.Namespaces(metav1.NamespaceAll),
	}
	l.clusterName = cluster.ClusterName
	l.clusterLister = cluster.Management.Management.Clusters("").Controller().Lister()
	l.catalogLister = cluster.Management.Management.Catalogs(metav1.NamespaceAll).Controller().Lister()
	l.projectLister = cluster.Management.Management.Projects(cluster.ClusterName).Controller().Lister()
	l.templateLister = cluster.Management.Management.CatalogTemplates(metav1.NamespaceAll).Controller().Lister()
	l.daemonsets = cluster.Apps.DaemonSets(loggingconfig.LoggingNamespace)
	l.secrets = cluster.Core.Secrets(loggingconfig.LoggingNamespace)
	l.appDeployer = ad
}

// Version reports the logging chart version this service can upgrade to.
func (l *LoggingService) Version() (string, error) {
	return loggingconfig.RancherLoggingInitVersion(), nil
}

// Upgrade moves the logging app from currentVersion to the latest available
// template version. It returns the new full version string on success, and
// returns currentVersion unchanged when already up to date. Preconditions
// (cluster ready, catalog synced) are checked up front because helm will not
// retry those errors.
func (l *LoggingService) Upgrade(currentVersion string) (string, error) {
	appName := loggingconfig.AppName
	templateID := loggingconfig.RancherLoggingTemplateID()
	template, err := l.templateLister.Get(namespace.GlobalNamespace, templateID)
	if err != nil {
		return "", errors.Wrapf(err, "get template %s failed", templateID)
	}
	templateVersion, err := versionutil.LatestAvailableTemplateVersion(template)
	if err != nil {
		return "", err
	}

	newFullVersion := fmt.Sprintf("%s-%s", templateID, templateVersion.Version)
	if currentVersion == newFullVersion {
		return currentVersion, nil
	}

	// check cluster ready before upgrade, because helm will not retry if got cluster not ready error
	cluster, err := l.clusterLister.Get(metav1.NamespaceAll, l.clusterName)
	if err != nil {
		return "", fmt.Errorf("get cluster %s failed, %v", l.clusterName, err)
	}
	if !v3.ClusterConditionReady.IsTrue(cluster) {
		return "", fmt.Errorf("cluster %v not ready", l.clusterName)
	}

	// clean old version: a currentVersion that does not contain the template
	// ID predates the chart-based deployment, so its raw resources must go.
	if !strings.Contains(currentVersion, templateID) {
		if err = l.removeLegacy(); err != nil {
			return "", err
		}
	}

	// upgrade old app: it lives in the cluster's system project.
	defaultSystemProjects, err := l.projectLister.List(metav1.NamespaceAll, labels.Set(project.SystemProjectLabel).AsSelector())
	if err != nil {
		return "", errors.Wrap(err, "list system project failed")
	}
	if len(defaultSystemProjects) == 0 {
		return "", errors.New("get system project failed")
	}
	systemProject := defaultSystemProjects[0]
	if systemProject == nil {
		return "", errors.New("get system project failed")
	}

	app, err := l.appDeployer.AppsLister.Get(systemProject.Name, appName)
	if err != nil {
		if apierrors.IsNotFound(err) {
			// Nothing deployed yet — nothing to upgrade.
			return newFullVersion, nil
		}
		return "", errors.Wrapf(err, "get app %s:%s failed", systemProject.Name, appName)
	}

	_, systemCatalogName, _, _, _, err := common.SplitExternalID(templateVersion.ExternalID)
	if err != nil {
		return "", err
	}
	systemCatalog, err := l.catalogLister.Get(metav1.NamespaceAll, systemCatalogName)
	if err != nil {
		return "", fmt.Errorf("get catalog %s failed, %v", systemCatalogName, err)
	}
	if !v3.CatalogConditionUpgraded.IsTrue(systemCatalog) || !v3.CatalogConditionRefreshed.IsTrue(systemCatalog) || !v3.CatalogConditionDiskCached.IsTrue(systemCatalog) {
		return "", fmt.Errorf("catalog %v not ready", systemCatalogName)
	}

	newApp := app.DeepCopy()
	newApp.Spec.ExternalID = templateVersion.ExternalID
	if !reflect.DeepEqual(newApp, app) {
		// add force upgrade to handle chart compatibility in different version
		projectv3.AppConditionForceUpgrade.Unknown(newApp)
		if _, err = l.appDeployer.AppsGetter.Apps(metav1.NamespaceAll).Update(newApp); err != nil {
			return "", errors.Wrapf(err, "update app %s:%s failed", app.Namespace, app.Name)
		}
	}
	return newFullVersion, nil
}

// removeLegacy deletes the daemonsets and secrets left behind by the
// pre-chart logging deployment. NotFound errors are ignored so the cleanup
// is idempotent.
func (l *LoggingService) removeLegacy() error {
	op := metav1.DeletePropagationBackground
	// errMsg format arguments are: namespace, resource kind, resource name.
	errMsg := "failed to remove legacy logging %s %s:%s when upgrade"

	if err := l.daemonsets.Delete(loggingconfig.FluentdName, &metav1.DeleteOptions{PropagationPolicy: &op}); err != nil && !apierrors.IsNotFound(err) {
		return errors.Wrapf(err, errMsg, loggingconfig.LoggingNamespace, "daemonset", loggingconfig.FluentdName)
	}
	if err := l.daemonsets.Delete(loggingconfig.LogAggregatorName, &metav1.DeleteOptions{PropagationPolicy: &op}); err != nil && !apierrors.IsNotFound(err) {
		return errors.Wrapf(err, errMsg, loggingconfig.LoggingNamespace, "daemonset", loggingconfig.LogAggregatorName)
	}

	legacySSlConfigName := "sslconfig"
	legacyClusterConfigName := "cluster-logging"
	legacyProjectConfigName := "project-logging"
	// BUGFIX: the original passed ("serect", namespace, name) for the three
	// secrets — misspelling "secret" and swapping the namespace/kind argument
	// order relative to the daemonset messages above, so the formatted error
	// read "...logging serect <ns>:<name>". Arguments now match errMsg.
	if err := l.secrets.Delete(legacySSlConfigName, &metav1.DeleteOptions{PropagationPolicy: &op}); err != nil && !apierrors.IsNotFound(err) {
		return errors.Wrapf(err, errMsg, loggingconfig.LoggingNamespace, "secret", legacySSlConfigName)
	}
	if err := l.secrets.Delete(legacyClusterConfigName, &metav1.DeleteOptions{PropagationPolicy: &op}); err != nil && !apierrors.IsNotFound(err) {
		return errors.Wrapf(err, errMsg, loggingconfig.LoggingNamespace, "secret", legacyClusterConfigName)
	}
	if err := l.secrets.Delete(legacyProjectConfigName, &metav1.DeleteOptions{PropagationPolicy: &op}); err != nil && !apierrors.IsNotFound(err) {
		return errors.Wrapf(err, errMsg, loggingconfig.LoggingNamespace, "secret", legacyProjectConfigName)
	}
	return nil
}
robotology-legacy/yarp1
src/experiments/mirror/GraspCapture/resource.h
//{{NO_DEPENDENCIES}}
// Microsoft Developer Studio generated include file.
// Used by GraspCapture.rc
//

// Dialog / resource identifiers.
#define IDM_ABOUTBOX                    0x0010
#define IDD_ABOUTBOX                    100
#define IDS_ABOUTBOX                    101
#define IDD_GRASPCAPTURE_DIALOG         102
#define IDR_MAINFRAME                   128
#define IDD_CAMERA                      129
#define IDD_TRACKER                     130
#define IDD_DATAGLOVE                   131
#define IDB_GRASPING                    133
#define IDB_TRACKER                     134
#define IDB_GLOVE                       136
#define IDD_MESSAGES                    137
#define IDD_OPTIONS                     138

// Control identifiers. NOTE: several IDs are intentionally shared across
// different dialogs (e.g. IDC_CALIBRATION/IDC_POS_X both 1008), which is
// normal for Developer Studio resource files.
#define IDC_CONNECT                     1000
#define IDC_DISCONNECT                  1001
#define IDC_ACQ_START                   1002
#define IDC_ACQ_STOP                    1003
#define IDC_LIVE_CAMERA                 1004
#define IDC_LIVE_GLOVE                  1005
#define IDC_LIVE_TRACKER                1006
#define IDC_CALIBRATION                 1008
#define IDC_POS_X                       1008
#define IDC_POS_Y                       1009
#define IDC_DEBUG_WND                   1009
#define IDC_POS_Z                       1010
#define IDC_KILL                        1010
#define IDC_OR_AZIMUTH                  1011
#define IDC_FINGER_1                    1011
#define IDC_OR_ELEVATION                1012
#define IDC_FINGER_2                    1012
#define IDC_OR_ROLL                     1013
#define IDC_FINGER_3                    1013
#define IDC_FINGER_4                    1014
#define IDC_MESS_BOX                    1014
#define IDC_FINGER_5                    1015
#define IDC_LOAD                        1015
#define IDC_FINGER_6                    1016
#define IDC_SAVE                        1016
#define IDC_FINGER_7                    1017
#define IDC_DEFAULT                     1017
#define IDC_FINGER_8                    1018
#define IDC_SETPATH                     1018
#define IDC_FINGER_9                    1019
#define IDC_FINGER_10                   1020
#define IDC_PORT_N                      1020
#define IDC_FINGER_11                   1021
#define IDC_NET_N                       1021
#define IDC_FINGER_12                   1022
#define IDC_REFRESH                     1022
#define IDC_SAVEPATH                    1023
#define IDC_PREFIX                      1025
#define IDC_FINGER_13                   1026
#define IDC_FINGER_14                   1027
#define IDC_FINGER_15                   1028
#define IDC_ABD_1                       1029
#define IDC_ABD_2                       1030
#define IDC_ABD_3                       1031
#define IDC_ABD_4                       1032
#define IDC_PALM                        1033
#define IDC_WR_ABD                      1034
#define IDC_WR_FLX                      1035
#define IDC_PRES_1                      1036
#define IDC_PRES_2                      1037
#define IDC_PRES_3                      1038
#define IDC_PRES_4                      1039
#define IDC_PUP_DIA                     1040
#define IDC_PUP_X                       1041
#define IDC_PUP_Y                       1042

// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE        139
#define _APS_NEXT_COMMAND_VALUE         32771
#define _APS_NEXT_CONTROL_VALUE         1026
#define _APS_NEXT_SYMED_VALUE           101
#endif
#endif
twinkle-cloud/twinkle-framework
twinkle-connector/src/main/java/com/twinkle/framework/connector/ConnectorManager.java
package com.twinkle.framework.connector;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.twinkle.framework.api.component.AbstractComponent;
import com.twinkle.framework.api.config.Configurable;
import com.twinkle.framework.api.constant.ExceptionCode;
import com.twinkle.framework.api.exception.ConfigurationException;
import com.twinkle.framework.configure.component.ComponentFactory;
import com.twinkle.framework.connector.server.ServerConnector;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;

import java.util.*;

/**
 * Function: Manages the configured {@link Connector} instances: builds them
 * from the "ConnectorNames"/"Connectors" JSON configuration, registers server
 * connectors as services, and provides lookup/removal by name. <br/>
 * Reason: Central registry so other components resolve connectors by name. <br/>
 * Date: 2019-07-14 17:42<br/>
 *
 * @author chenxj
 * @see
 * @since JDK 1.8
 */
@Slf4j
public class ConnectorManager extends AbstractComponent implements Configurable {
    /**
     * Connector name list, in configuration order.
     */
    private List<String> connectorNameList;
    /**
     * Connector Map, keyed by connector name.
     */
    private Map<String, Connector> connectorMap = null;

    public ConnectorManager() {
    }

    /**
     * Builds every connector named in "ConnectorNames" from its matching
     * entry in "Connectors".
     *
     * @param _conf configuration holding "ConnectorNames" (array of strings)
     *              and "Connectors" (array of connector definitions with a "Name" key)
     * @throws ConfigurationException when either array is missing or empty
     */
    @Override
    public void configure(JSONObject _conf) throws ConfigurationException {
        JSONArray tempNameArray = _conf.getJSONArray("ConnectorNames");
        JSONArray tempConnectorArray = _conf.getJSONArray("Connectors");
        if (CollectionUtils.isEmpty(tempNameArray) || CollectionUtils.isEmpty(tempConnectorArray)) {
            throw new ConfigurationException(ExceptionCode.CONNECTOR_MANDATORY_ATTR_MISSED,
                    "ConnectorManager.configure(): Connectors is a mandatory parameter. ");
        }
        this.connectorNameList = new ArrayList<>(tempNameArray.size());
        this.connectorMap = new HashMap<>(tempNameArray.size());
        // Build the connectors one by one, in ConnectorNames order.
        for (int i = 0; i < tempNameArray.size(); i++) {
            String tempItem = tempNameArray.getString(i);
            for (int j = 0; j < tempConnectorArray.size(); j++) {
                JSONObject tempObj = tempConnectorArray.getJSONObject(j);
                // Compare from tempItem to avoid a NullPointerException when a
                // connector entry lacks a "Name" attribute (the original called
                // equals() on the possibly-null attribute value).
                if (tempItem.equals(tempObj.getString("Name"))) {
                    // Component path: <parent full path>'\'<connector name>.
                    // The original appended the magic number (char) 92, which
                    // is exactly the backslash character; '\\' is the same
                    // value spelled readably.
                    StringBuilder tempBuilder = new StringBuilder(this.getFullPathName());
                    tempBuilder.append('\\');
                    tempBuilder.append(tempItem);
                    Connector tempConnector = ComponentFactory.getInstance().loadComponent(tempBuilder.toString(), tempObj);
                    this.connectorNameList.add(tempItem);
                    this.addConnector(tempItem, tempConnector);
                    break;
                }
            }
        }
    }

    /**
     * Get the connector by name.
     *
     * @param _name connector name
     * @return the connector, or {@code null} if none is registered under _name
     */
    public Connector getConnector(String _name) {
        return connectorMap.get(_name);
    }

    /**
     * Add a connector into the manager center. Server connectors are also
     * registered as services before being stored.
     *
     * @param _name      connector name
     * @param _connector connector instance
     */
    private void addConnector(String _name, Connector _connector) {
        if (_connector instanceof ServerConnector) {
            ((ServerConnector) _connector).registerAsService();
        }
        connectorMap.put(_name, _connector);
    }

    /**
     * Remove the specified connector.
     *
     * @param _name connector name
     */
    public void removeConnector(String _name) {
        connectorMap.remove(_name);
    }

    /**
     * Get the Iterator for the connector list.
     * (Parameterized return type; raw-type callers remain source compatible.)
     *
     * @return iterator over all registered connectors
     */
    public Iterator<Connector> getConnectors() {
        return connectorMap.values().iterator();
    }
}
3noch/lotus
cmd/lotus-chainwatch/sync.go
package main

import (
	"bytes"
	"container/list"
	"context"
	"sync"
	"sync/atomic"

	actors2 "github.com/filecoin-project/lotus/chain/actors"
	"github.com/filecoin-project/lotus/chain/address"
	"github.com/ipfs/go-cid"

	"github.com/filecoin-project/lotus/api"
	"github.com/filecoin-project/lotus/chain/store"
	"github.com/filecoin-project/lotus/chain/types"
)

// runSyncer subscribes to chain head notifications and persists every current
// or applied tipset into st. Reverts are not handled yet.
func runSyncer(ctx context.Context, api api.FullNode, st *storage) {
	notifs, err := api.ChainNotify(ctx)
	if err != nil {
		panic(err)
	}
	go func() {
		for notif := range notifs {
			for _, change := range notif {
				switch change.Type {
				case store.HCCurrent:
					fallthrough
				case store.HCApply:
					syncHead(ctx, api, st, change.Val)
				case store.HCRevert:
					log.Warnf("revert todo")
				}
			}
		}
	}()
}

// minerKey identifies a miner actor at a particular state root.
type minerKey struct {
	addr      address.Address
	act       types.Actor
	stateroot cid.Cid
}

// minerInfo holds the decoded on-chain state for one miner.
type minerInfo struct {
	state actors2.StorageMinerActorState
	info  actors2.MinerInfo
}

// syncHead walks the chain backwards from ts, collecting every block header
// not yet stored, then persists actors, miners, headers, messages and the
// resolved address map into st. Errors on individual items are logged and
// skipped so one bad block does not abort the whole sync.
func syncHead(ctx context.Context, api api.FullNode, st *storage, ts *types.TipSet) {
	addresses := map[address.Address]address.Address{}
	actors := map[address.Address]map[types.Actor]cid.Cid{}
	var alk sync.Mutex

	log.Infof("Getting headers / actors")

	toSync := map[cid.Cid]*types.BlockHeader{}
	toVisit := list.New()

	for _, header := range ts.Blocks() {
		toVisit.PushBack(header)
	}

	// Breadth-ish walk over parents until only already-stored blocks remain.
	for toVisit.Len() > 0 {
		bh := toVisit.Remove(toVisit.Back()).(*types.BlockHeader)

		if _, seen := toSync[bh.Cid()]; seen || st.hasBlock(bh.Cid()) {
			continue
		}

		toSync[bh.Cid()] = bh
		addresses[bh.Miner] = address.Undef

		if len(toSync)%500 == 10 {
			log.Infof("todo: (%d) %s", len(toSync), bh.Cid())
		}

		if len(bh.Parents) == 0 {
			continue
		}

		pts, err := api.ChainGetTipSet(ctx, types.NewTipSetKey(bh.Parents...))
		if err != nil {
			log.Error(err)
			continue
		}

		for _, header := range pts.Blocks() {
			toVisit.PushBack(header)
		}
	}

	log.Infof("Syncing %d blocks", len(toSync))

	log.Infof("Persisting actors")

	// paDone is a progress counter advanced from 50 worker goroutines, so it
	// must be updated atomically. BUGFIX: the original used a plain
	// `paDone++` inside the par callback, which is a data race.
	var paDone int32
	par(50, maparr(toSync), func(bh *types.BlockHeader) {
		done := int(atomic.AddInt32(&paDone, 1))
		if done%100 == 0 {
			log.Infof("pa: %d %d%%", done, (done*100)/len(toSync))
		}

		if len(bh.Parents) == 0 { // genesis case
			ts, err := types.NewTipSet([]*types.BlockHeader{bh})
			if err != nil {
				// BUGFIX: the original discarded this error and used ts anyway.
				log.Error(err)
				return
			}

			aadrs, err := api.StateListActors(ctx, ts)
			if err != nil {
				log.Error(err)
				return
			}

			par(50, aadrs, func(addr address.Address) {
				act, err := api.StateGetActor(ctx, addr, ts)
				if err != nil {
					log.Error(err)
					return
				}
				alk.Lock()
				_, ok := actors[addr]
				if !ok {
					actors[addr] = map[types.Actor]cid.Cid{}
				}
				actors[addr][*act] = bh.ParentStateRoot
				addresses[addr] = address.Undef
				alk.Unlock()
			})
			return
		}

		pts, err := api.ChainGetTipSet(ctx, types.NewTipSetKey(bh.Parents...))
		if err != nil {
			log.Error(err)
			return
		}

		// Only actors changed between the parent state and this block's state
		// need to be (re)recorded.
		changes, err := api.StateChangedActors(ctx, pts.ParentState(), bh.ParentStateRoot)
		if err != nil {
			log.Error(err)
			return
		}

		for a, act := range changes {
			addr, err := address.NewFromString(a)
			if err != nil {
				log.Error(err)
				return
			}

			alk.Lock()
			_, ok := actors[addr]
			if !ok {
				actors[addr] = map[types.Actor]cid.Cid{}
			}
			actors[addr][act] = bh.ParentStateRoot
			addresses[addr] = address.Undef
			alk.Unlock()
		}
	})

	if err := st.storeActors(actors); err != nil {
		log.Error(err)
		return
	}

	log.Infof("Persisting miners")

	miners := map[minerKey]*minerInfo{}
	for addr, m := range actors {
		for actor, c := range m {
			if actor.Code != actors2.StorageMinerCodeCid {
				continue
			}
			miners[minerKey{
				addr:      addr,
				act:       actor,
				stateroot: c,
			}] = &minerInfo{}
		}
	}

	par(50, kvmaparr(miners), func(it func() (minerKey, *minerInfo)) {
		k, info := it()

		// Decode the miner actor state, then its MinerInfo object.
		astb, err := api.ChainReadObj(ctx, k.act.Head)
		if err != nil {
			log.Error(err)
			return
		}
		if err := info.state.UnmarshalCBOR(bytes.NewReader(astb)); err != nil {
			log.Error(err)
			return
		}
		ib, err := api.ChainReadObj(ctx, info.state.Info)
		if err != nil {
			log.Error(err)
			return
		}
		if err := info.info.UnmarshalCBOR(bytes.NewReader(ib)); err != nil {
			log.Error(err)
			return
		}
	})

	if err := st.storeMiners(miners); err != nil {
		log.Error(err)
		return
	}

	log.Infof("Persisting headers")
	if err := st.storeHeaders(toSync, true); err != nil {
		log.Error(err)
		return
	}

	log.Infof("Getting messages")

	msgs, incls := fetchMessages(ctx, api, toSync)
	if err := st.storeMessages(msgs); err != nil {
		log.Error(err)
		return
	}
	if err := st.storeMsgInclusions(incls); err != nil {
		log.Error(err)
		return
	}

	log.Infof("Resolving addresses")

	for _, message := range msgs {
		addresses[message.To] = address.Undef
		addresses[message.From] = address.Undef
	}

	par(50, kmaparr(addresses), func(addr address.Address) {
		// nil tipset resolves against the current chain head.
		raddr, err := api.StateLookupID(ctx, addr, nil)
		if err != nil {
			log.Warn(err)
			return
		}
		alk.Lock()
		addresses[addr] = raddr
		alk.Unlock()
	})

	if err := st.storeAddressMap(addresses); err != nil {
		log.Error(err)
		return
	}

	log.Infof("Sync done")
}

// fetchMessages loads every message referenced by the given headers, in
// parallel, returning the messages by CID and a block-CID -> message-CIDs
// inclusion map.
func fetchMessages(ctx context.Context, api api.FullNode, toSync map[cid.Cid]*types.BlockHeader) (map[cid.Cid]*types.Message, map[cid.Cid][]cid.Cid) {
	var lk sync.Mutex
	messages := map[cid.Cid]*types.Message{}
	inclusions := map[cid.Cid][]cid.Cid{} // block -> msgs

	par(50, maparr(toSync), func(header *types.BlockHeader) {
		msgs, err := api.ChainGetBlockMessages(ctx, header.Cid())
		if err != nil {
			log.Error(err)
			return
		}

		vmm := make([]*types.Message, 0, len(msgs.Cids))
		for _, m := range msgs.BlsMessages {
			vmm = append(vmm, m)
		}
		for _, m := range msgs.SecpkMessages {
			vmm = append(vmm, &m.Message)
		}

		lk.Lock()
		for _, message := range vmm {
			messages[message.Cid()] = message
			inclusions[header.Cid()] = append(inclusions[header.Cid()], message.Cid())
		}
		lk.Unlock()
	})

	return messages, inclusions
}
NeocortexF/FlyingBase
Flight Department Client/src/secure/WorkingWindow3.java
<reponame>NeocortexF/FlyingBase<filename>Flight Department Client/src/secure/WorkingWindow3.java package secure; import java.awt.Color; import java.awt.Dimension; import java.awt.Font; import java.awt.Toolkit; import java.awt.event.ItemEvent; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Scanner; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.JOptionPane; import javax.swing.JTextField; import javax.swing.WindowConstants; import javax.swing.table.DefaultTableModel; public class WorkingWindow3 extends javax.swing.JFrame { public static final String FD_CHIEF_SURNAME = "Иванов"; public static final String FD_SUBCHIEF = "Петров"; public static final String SAFETY_MANAGER = "Сидоров"; public static final String FD_SUBCHIEF_ASSESSMENT = "Петров П.П."; public static final String FD_CHEIF_ASSESSMENT = "Иванов И.И."; public static final String SAFETY_MANAGER_ASSESSMENT = "Сидоров С.С."; private String globalIncNbr; private String corActFIO; private String talonCloseFIO; private String insideStr; private String folderName = ""; private boolean comboChanged = false; private Scanner scan; public WorkingWindow3(String str, int tabIndex) throws ParseException { initComponents(); insideStr = str; folderName = str; retrieve(str); jTabbedPane1.setSelectedIndex(tabIndex); hideIndexTextField(); tfKVS.getDocument().addDocumentListener(new MyDocumentListener()); tfVP.getDocument().addDocumentListener(new MyDocumentListener()); tpInfoFromGLazkov.getDocument().addDocumentListener(new MyDocumentListener()); tfRiskIndex.getDocument().addDocumentListener(new MyDocumentListener()); MyDocumentListener.setDocumentChanged(false); comboChanged = false; } private void retrieve(String str) throws ParseException { //заполняем таблицу DefaultTableModel dmWW = new DBUpdaterWorkingWindow().getData(str); 
tableGeneralInfo.setModel(dmWW); //глобальная переменная инкамин намбер globalIncNbr = tableGeneralInfo.getValueAt(0, 0).toString(); //заполняем остальные поля в нередактируемой зоне Object[] data = new DBUpdaterWorkingWindow().getEventsData(str); Color greenMatrix = new Color(34, 177, 76); Color yellowMatrix = new Color(255, 242, 0); Color orangeMatrix = new Color(255, 127, 39); Color redMatrix = new Color(237, 28, 36); if (data[0] == null) { } else { tfEvent1.setText(data[0].toString()); } if (data[1] == null) { } else { tfAirFASE1.setText(data[1].toString()); } if (data[2] == null) { } else { String parseAssesment = data[2].toString(); if (parseAssesment.contains("Низкий")) { tfAss1.setText("Низкий риск 1-10"); tfAss1.setBackground(greenMatrix); } else if (parseAssesment.contains("Умерен")) { tfAss1.setText("Умеренный риск 20-50"); tfAss1.setBackground(yellowMatrix); } else if (parseAssesment.contains("Высокий")) { tfAss1.setText("Высокий риск 100-102"); tfAss1.setBackground(orangeMatrix); } else if (parseAssesment.contains("Недоп")) { tfAss1.setText("Недопустимый риск 500-2500"); tfAss1.setBackground(redMatrix); } else if (parseAssesment.contains("учит")) { tfAss1.setText("Не учитывать"); tfAss1.setBackground(Color.WHITE); } else if (parseAssesment.contains("действ")) { tfAss1.setText("Не действительно"); tfAss1.setBackground(Color.WHITE); } else { tfAss1.setText(""); tfAss1.setBackground(Color.WHITE); } } if (data[3] == null) { } else { tfEvent2.setText(data[3].toString()); } if (data[4] == null) { } else { tfAirFASE2.setText(data[4].toString()); } if (data[5] == null) { } else { String parseAssesment = data[5].toString(); if (parseAssesment.contains("Низкий")) { tfAss2.setText("Низкий риск 1-10"); tfAss2.setBackground(greenMatrix); } else if (parseAssesment.contains("Умерен")) { tfAss2.setText("Умеренный риск 20-50"); tfAss2.setBackground(yellowMatrix); } else if (parseAssesment.contains("Высокий")) { tfAss2.setText("Высокий риск 100-102"); 
tfAss2.setBackground(orangeMatrix); } else if (parseAssesment.contains("Недоп")) { tfAss2.setText("Недопустимый риск 500-2500"); tfAss2.setBackground(redMatrix); } else if (parseAssesment.contains("учит")) { tfAss2.setText("Не учитывать"); tfAss2.setBackground(Color.WHITE); } else if (parseAssesment.contains("действ")) { tfAss2.setText("Не действительно"); tfAss2.setBackground(Color.WHITE); } else { tfAss2.setText(""); tfAss2.setBackground(Color.WHITE); } } if (data[6] == null) { } else { tfEvent3.setText(data[6].toString()); } if (data[7] == null) { } else { tfAirFASE3.setText(data[7].toString()); } if (data[8] == null) { } else { String parseAssesment = data[8].toString(); if (parseAssesment.contains("Низкий")) { tfAss3.setText("Низкий риск 1-10"); tfAss3.setBackground(greenMatrix); } else if (parseAssesment.contains("Умерен")) { tfAss3.setText("Умеренный риск 20-50"); tfAss3.setBackground(yellowMatrix); } else if (parseAssesment.contains("Высокий")) { tfAss3.setText("Высокий риск 100-102"); tfAss3.setBackground(orangeMatrix); } else if (parseAssesment.contains("Недоп")) { tfAss3.setText("Недопустимый риск 500-2500"); tfAss3.setBackground(redMatrix); } else if (parseAssesment.contains("учит")) { tfAss3.setText("Не учитывать"); tfAss3.setBackground(Color.WHITE); } else if (parseAssesment.contains("действ")) { tfAss3.setText("Не действительно"); tfAss3.setBackground(Color.WHITE); } else { tfAss3.setText(""); tfAss3.setBackground(Color.WHITE); } } if (data[9] == null) { } else { tfEvent4.setText(data[9].toString()); } if (data[10] == null) { } else { tfAirFASE4.setText(data[10].toString()); } if (data[11] == null) { } else { String parseAssesment = data[11].toString(); if (parseAssesment.contains("Низкий")) { tfAss4.setText("Низкий риск 1-10"); tfAss4.setBackground(greenMatrix); } else if (parseAssesment.contains("Умерен")) { tfAss4.setText("Умеренный риск 20-50"); tfAss4.setBackground(yellowMatrix); } else if (parseAssesment.contains("Высокий")) { 
tfAss4.setText("Высокий риск 100-102"); tfAss4.setBackground(orangeMatrix); } else if (parseAssesment.contains("Недоп")) { tfAss4.setText("Недопустимый риск 500-2500"); tfAss4.setBackground(redMatrix); } else if (parseAssesment.contains("учит")) { tfAss4.setText("Не учитывать"); tfAss4.setBackground(Color.WHITE); } else if (parseAssesment.contains("действ")) { tfAss4.setText("Не действительно"); tfAss4.setBackground(Color.WHITE); } else { tfAss4.setText(""); tfAss4.setBackground(Color.WHITE); } } if (data[12] == null) { } else { tpPrim.setText(data[12].toString()); } if (data[13] == null) { } else { tpAnalysis.setText(data[13].toString()); } if (data[14] == null) { } else { tpCorrectiveActions.setText(data[14].toString()); } if (data[15] == null) { } else { tfCommanderAE.setText(data[15].toString()); } if (data[16] == null) { } else { tfAnalysisDate.setText(data[16].toString()); } //заполняем поля в нижней панеле (все 3 вкладки) Object[] lowerPanelData = new DBUpdaterWorkingWindow().getLowerPanelData(str); if (lowerPanelData[0] == null) { } else { tfKVS.setText(lowerPanelData[0].toString()); } if (lowerPanelData[1] == null) { } else { tfVP.setText(lowerPanelData[1].toString()); } if (lowerPanelData[2] == null) { } else { String tempStr = lowerPanelData[2].toString(); if (tempStr.contains("КВС")) { comboPF.setSelectedIndex(1); } else if (tempStr.contains("2П")) { comboPF.setSelectedIndex(2); } } if (lowerPanelData[3] == null) { } else { String tempStr = lowerPanelData[3].toString(); if (tempStr.equals("2")) { comboAE.setSelectedIndex(1); } else if (tempStr.equals("3")) { comboAE.setSelectedIndex(2); } else if (tempStr.equals("4")) { comboAE.setSelectedIndex(3); } else if (tempStr.equals("5")) { comboAE.setSelectedIndex(4); } else if (tempStr.equals("6")) { comboAE.setSelectedIndex(5); } else if (tempStr.equals("7")) { comboAE.setSelectedIndex(6); } else if (tempStr.equals("8")) { comboAE.setSelectedIndex(7); } else if (tempStr.equals("10")) { 
comboAE.setSelectedIndex(8); } else if (tempStr.equals("12")) { comboAE.setSelectedIndex(9); } } if (lowerPanelData[4] == null) { } else { String tempStr = lowerPanelData[4].toString(); if (tempStr.contains("Анализ")) { comboNesActions.setSelectedIndex(1); } else if (tempStr.contains("ИБП")) { comboNesActions.setSelectedIndex(3); } else if (tempStr.contains("ЛО")) { comboNesActions.setSelectedIndex(2); } else if (tempStr.contains("ПРАПИ")) { comboNesActions.setSelectedIndex(4); } else if (tempStr.contains("учит")) { comboNesActions.setSelectedIndex(5); } else if (tempStr.contains("действ")) { comboNesActions.setSelectedIndex(6); } else { comboNesActions.setSelectedIndex(0); } } if (lowerPanelData[5] == null) { } else { comboControl.setSelectedItem(lowerPanelData[5]); } if (lowerPanelData[6] == null) { } else { Object tempDate = lowerPanelData[6]; String stringDate = tempDate.toString(); SimpleDateFormat parser = new SimpleDateFormat("dd.MM.yyyy"); Date date = (Date) parser.parse(stringDate); dateCorDeadline.setDate(date); } if (lowerPanelData[7] == null) { } else { tpInfoFromGLazkov.setText(lowerPanelData[7].toString()); } //радио кнопка одобренный действий получение if (lowerPanelData[8] == null) { rbNotCA.setSelected(true); } else { String tempStr = lowerPanelData[8].toString(); if (tempStr.contains(FD_CHIEF_SURNAME)) { rbGlazkov.setSelected(true); } else if (tempStr.contains(FD_SUBCHIEF)) { rbRedkin.setSelected(true); } else if (tempStr.equals("") || tempStr.equals(" ")) { rbNotCA.setSelected(true); } } if (lowerPanelData[10] == null) { rbNotClosure.setSelected(true); } else { String tempStr = lowerPanelData[10].toString(); if (tempStr.contains(FD_CHIEF_SURNAME)) { rbZubkov.setSelected(true); } else if (tempStr.contains(SAFETY_MANAGER)) { rbChebrov.setSelected(true); } else if (tempStr.equals("") || tempStr.equals(" ")) { rbNotClosure.setSelected(true); } } if (lowerPanelData[12] == null) { } else { String tempStr = lowerPanelData[12].toString(); int risk = 
Integer.parseInt(tempStr); if (risk == 0) { tfRiskIndex.setText("Нет оценки или не действительно"); tfRiskIndexFD.setText("Нет оценки или не действительно"); } else if (risk <= 19) { setTfRiskIndex("Низкий риск - " + risk, greenMatrix); setTfRiskIndexFD("Низкий риск - " + risk, greenMatrix); } else if (risk > 19 && risk <= 99) { setTfRiskIndex("Умеренный риск - " + risk, yellowMatrix); setTfRiskIndexFD("Умеренный риск - " + risk, yellowMatrix); } else if (risk >= 100 && risk < 104) { setTfRiskIndex("Высокий риск - " + risk, orangeMatrix); setTfRiskIndexFD("Высокий риск - " + risk, orangeMatrix); } else if (risk >= 105) { setTfRiskIndex("Недопустимый риск - " + risk, redMatrix); setTfRiskIndexFD("Недопустимый риск - " + risk, redMatrix); } else { tfRiskIndex.setText("Оценка не определена"); tfRiskIndexFD.setText("Оценка не определена"); } } } private void hideIndexTextField() { tfIndex.setVisible(false); tfIndexFD.setVisible(false); } public JTextField getTfRiskIndex() { return tfRiskIndex; } public void setTfRiskIndex(String tfRiskIndex, Color color) { this.tfRiskIndex.setText(tfRiskIndex); this.tfRiskIndex.setBackground(color); } public void setTfRiskIndexFD(String tfRiskIndex, Color color) { this.tfRiskIndexFD.setText(tfRiskIndex); this.tfRiskIndexFD.setBackground(color); } public JTextField getTfIndex() { return tfIndex; } public void setTfIndex(String index) { this.tfIndex.setText(index); } public void setTfIndexFD(String index) { this.tfIndexFD.setText(index); } public void setKVSid(String id) { this.tfKVS.setText(id); } public void setVPid(String id) { this.tfVP.setText(id); } //взаимодействие между окнами //создаем ссылку на родителя private Welcome parent; public void setParent(Welcome w) { this.parent = w; } @Override public Welcome getParent() { return this.parent; } @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jPanel4 = new javax.swing.JPanel(); 
buttonGroup1 = new javax.swing.ButtonGroup(); buttonGroup2 = new javax.swing.ButtonGroup(); jPanel8 = new javax.swing.JPanel(); jPanel1 = new javax.swing.JPanel(); jScrollPane1 = new javax.swing.JScrollPane(); tableGeneralInfo = new javax.swing.JTable(); jLabel1 = new javax.swing.JLabel(); jLabel2 = new javax.swing.JLabel(); tfEvent1 = new javax.swing.JTextField(); tfAss1 = new javax.swing.JTextField(); tfAirFASE1 = new javax.swing.JTextField(); jLabel3 = new javax.swing.JLabel(); tfEvent2 = new javax.swing.JTextField(); tfAirFASE2 = new javax.swing.JTextField(); tfAss2 = new javax.swing.JTextField(); tfEvent3 = new javax.swing.JTextField(); tfAirFASE3 = new javax.swing.JTextField(); tfAss3 = new javax.swing.JTextField(); tfEvent4 = new javax.swing.JTextField(); tfAirFASE4 = new javax.swing.JTextField(); tfAss4 = new javax.swing.JTextField(); jScrollPane2 = new javax.swing.JScrollPane(); tpPrim = new javax.swing.JTextPane(); jLabel11 = new javax.swing.JLabel(); jLabel12 = new javax.swing.JLabel(); jScrollPane3 = new javax.swing.JScrollPane(); tpAnalysis = new javax.swing.JTextPane(); jLabel13 = new javax.swing.JLabel(); jScrollPane4 = new javax.swing.JScrollPane(); tpCorrectiveActions = new javax.swing.JTextPane(); jLabel14 = new javax.swing.JLabel(); tfCommanderAE = new javax.swing.JTextField(); jLabel19 = new javax.swing.JLabel(); tfAnalysisDate = new javax.swing.JTextField(); jButton1 = new javax.swing.JButton(); jTabbedPane1 = new javax.swing.JTabbedPane(); jPanel2 = new javax.swing.JPanel(); jLabel4 = new javax.swing.JLabel(); tfKVS = new javax.swing.JTextField(); tfVP = new javax.swing.JTextField(); jLabel5 = new javax.swing.JLabel(); jLabel6 = new javax.swing.JLabel(); comboPF = new javax.swing.JComboBox(); comboAE = new javax.swing.JComboBox(); jLabel7 = new javax.swing.JLabel(); comboNesActions = new javax.swing.JComboBox(); jLabel8 = new javax.swing.JLabel(); jLabel9 = new javax.swing.JLabel(); comboControl = new javax.swing.JComboBox(); dateCorDeadline = 
new com.toedter.calendar.JDateChooser(); bSendToAE = new javax.swing.JButton(); jLabel10 = new javax.swing.JLabel(); bCrew = new javax.swing.JButton(); bInfoVp = new javax.swing.JButton(); bInfoKvs = new javax.swing.JButton(); bFiles = new javax.swing.JButton(); jLabel20 = new javax.swing.JLabel(); jPanel3 = new javax.swing.JPanel(); jLabel15 = new javax.swing.JLabel(); jScrollPane5 = new javax.swing.JScrollPane(); tpInfoFromGLazkov = new javax.swing.JTextPane(); jLabel16 = new javax.swing.JLabel(); rbNotCA = new javax.swing.JRadioButton(); rbGlazkov = new javax.swing.JRadioButton(); rbRedkin = new javax.swing.JRadioButton(); bAcceptCorAct = new javax.swing.JButton(); bCorrActGreen = new javax.swing.JButton(); jPanel9 = new javax.swing.JPanel(); bMatrixFD = new javax.swing.JButton(); tfRiskIndexFD = new javax.swing.JTextField(); tfIndexFD = new javax.swing.JTextField(); jPanel5 = new javax.swing.JPanel(); jLabel22 = new javax.swing.JLabel(); jPanel6 = new javax.swing.JPanel(); tfRiskIndex = new javax.swing.JTextField(); jLabel18 = new javax.swing.JLabel(); tfIndex = new javax.swing.JTextField(); bMatrix = new javax.swing.JButton(); jPanel7 = new javax.swing.JPanel(); rbZubkov = new javax.swing.JRadioButton(); rbNotClosure = new javax.swing.JRadioButton(); bAcceptClosure = new javax.swing.JButton(); rbChebrov = new javax.swing.JRadioButton(); javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4); jPanel4.setLayout(jPanel4Layout); jPanel4Layout.setHorizontalGroup( jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 1053, Short.MAX_VALUE) ); jPanel4Layout.setVerticalGroup( jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 288, Short.MAX_VALUE) ); javax.swing.GroupLayout jPanel8Layout = new javax.swing.GroupLayout(jPanel8); jPanel8.setLayout(jPanel8Layout); jPanel8Layout.setHorizontalGroup( jPanel8Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) 
.addGap(0, 100, Short.MAX_VALUE) ); jPanel8Layout.setVerticalGroup( jPanel8Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGap(0, 100, Short.MAX_VALUE) ); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); setTitle("Flying Base: Flight Department "); setIconImage(Toolkit.getDefaultToolkit().getImage(getClass().getResource("FB Icon.png"))); addWindowListener(new java.awt.event.WindowAdapter() { public void windowClosing(java.awt.event.WindowEvent evt) { formWindowClosing(evt); } }); jPanel1.setBorder(new javax.swing.border.SoftBevelBorder(javax.swing.border.BevelBorder.RAISED)); tableGeneralInfo.setFont(new java.awt.Font("Tahoma", 0, 14)); tableGeneralInfo.setModel(new javax.swing.table.DefaultTableModel( new Object[][]{ }, new String[]{ "№", "Дата обработки", "Дата полета", "Борт", "Рейс", "Взлет", "Посадка" } )); jScrollPane1.setViewportView(tableGeneralInfo); jLabel1.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel1.setText("События полета"); jLabel2.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel2.setText("Предварительная оценка риска"); tfEvent1.setEditable(false); tfEvent1.setFont(new java.awt.Font("Tahoma", 0, 14)); tfAss1.setEditable(false); tfAss1.setFont(new java.awt.Font("Tahoma", 0, 14)); tfAirFASE1.setEditable(false); tfAirFASE1.setFont(new java.awt.Font("Tahoma", 0, 14)); jLabel3.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel3.setText("AirFASE №"); tfEvent2.setEditable(false); tfEvent2.setFont(new java.awt.Font("Tahoma", 0, 14)); tfAirFASE2.setEditable(false); tfAirFASE2.setFont(new java.awt.Font("Tahoma", 0, 14)); tfAss2.setEditable(false); tfAss2.setFont(new java.awt.Font("Tahoma", 0, 14)); tfEvent3.setEditable(false); tfEvent3.setFont(new java.awt.Font("Tahoma", 0, 14)); tfAirFASE3.setEditable(false); tfAirFASE3.setFont(new java.awt.Font("Tahoma", 0, 14)); tfAss3.setEditable(false); tfAss3.setFont(new java.awt.Font("Tahoma", 0, 14)); tfEvent4.setEditable(false); tfEvent4.setFont(new 
java.awt.Font("Tahoma", 0, 14)); tfEvent4.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { tfEvent4ActionPerformed(evt); } }); tfAirFASE4.setEditable(false); tfAirFASE4.setFont(new java.awt.Font("Tahoma", 0, 14)); tfAss4.setEditable(false); tfAss4.setFont(new java.awt.Font("Tahoma", 0, 14)); tpPrim.setEditable(false); tpPrim.setFont(new java.awt.Font("Tahoma", 0, 14)); jScrollPane2.setViewportView(tpPrim); jLabel11.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel11.setText("Примечание"); jLabel12.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel12.setText("Анализ полета *"); tpAnalysis.setFont(new java.awt.Font("Tahoma", 0, 14)); jScrollPane3.setViewportView(tpAnalysis); jLabel13.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel13.setText("Корректирующие действия *"); tpCorrectiveActions.setFont(new java.awt.Font("Tahoma", 0, 14)); jScrollPane4.setViewportView(tpCorrectiveActions); jLabel14.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel14.setText("Командир АЭ / Зам. командира АЭ:"); tfCommanderAE.setEditable(false); tfCommanderAE.setFont(new java.awt.Font("Tahoma", 0, 14)); jLabel19.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel19.setText("Дата анализа:"); tfAnalysisDate.setFont(new java.awt.Font("Tahoma", 0, 14)); tfAnalysisDate.setToolTipText("Дата устанавливается автоматически при нажатии на кнопку \"Отправить\" в клиенте авиаэскадрильи. 
Дата соотвествует настройкам сервера."); jButton1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/editList.png"))); // NOI18N jButton1.setText("Исправить *"); jButton1.setToolTipText("Исправить анализ полета и корректирующие действия"); jButton1.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jButton1ActionPerformed(evt); } }); javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(17, 17, 17) .addComponent(jLabel13)) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(17, 17, 17) .addComponent(jLabel12)) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(17, 17, 17) .addComponent(jLabel11)) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 1063, Short.MAX_VALUE)) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 1063, Short.MAX_VALUE)) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 1063, Short.MAX_VALUE)) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(tfEvent1, javax.swing.GroupLayout.DEFAULT_SIZE, 596, Short.MAX_VALUE) .addComponent(tfEvent2, 
javax.swing.GroupLayout.DEFAULT_SIZE, 596, Short.MAX_VALUE) .addComponent(tfEvent3, javax.swing.GroupLayout.DEFAULT_SIZE, 596, Short.MAX_VALUE) .addComponent(tfEvent4, javax.swing.GroupLayout.DEFAULT_SIZE, 596, Short.MAX_VALUE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(17, 17, 17) .addComponent(jLabel1) .addGap(454, 454, 454))) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(tfAirFASE4, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 173, Short.MAX_VALUE) .addComponent(tfAirFASE3, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 173, Short.MAX_VALUE) .addComponent(tfAirFASE2, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 173, Short.MAX_VALUE) .addComponent(tfAirFASE1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 173, Short.MAX_VALUE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)) .addGroup(jPanel1Layout.createSequentialGroup() .addComponent(jLabel3) .addGap(78, 78, 78))) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 278, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(tfAss2, javax.swing.GroupLayout.DEFAULT_SIZE, 282, Short.MAX_VALUE) .addComponent(tfAss1, javax.swing.GroupLayout.DEFAULT_SIZE, 282, Short.MAX_VALUE) .addComponent(tfAss3, javax.swing.GroupLayout.DEFAULT_SIZE, 282, Short.MAX_VALUE) .addComponent(tfAss4, javax.swing.GroupLayout.DEFAULT_SIZE, 282, Short.MAX_VALUE)))) 
.addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(10, 10, 10) .addComponent(jLabel19) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(tfAnalysisDate, javax.swing.GroupLayout.PREFERRED_SIZE, 248, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jLabel14) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(tfCommanderAE, javax.swing.GroupLayout.PREFERRED_SIZE, 244, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 14, Short.MAX_VALUE) .addComponent(jButton1)) .addComponent(jScrollPane4, javax.swing.GroupLayout.DEFAULT_SIZE, 1063, Short.MAX_VALUE)))) .addContainerGap()) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 53, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel1) .addComponent(jLabel2) .addComponent(jLabel3)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(tfEvent1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfAss1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfAirFASE1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, 
javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(tfEvent2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfAirFASE2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfAss2, javax.swing.GroupLayout.PREFERRED_SIZE, 26, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(tfEvent3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfAss3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfAirFASE3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(tfEvent4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfAss4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfAirFASE4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jLabel11) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 56, 
Short.MAX_VALUE) .addGap(2, 2, 2) .addComponent(jLabel12) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 57, Short.MAX_VALUE) .addGap(1, 1, 1) .addComponent(jLabel13) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jScrollPane4, javax.swing.GroupLayout.DEFAULT_SIZE, 55, Short.MAX_VALUE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel19) .addComponent(tfAnalysisDate, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel14) .addComponent(tfCommanderAE, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jButton1)) .addContainerGap()) ); jTabbedPane1.setBorder(new javax.swing.border.SoftBevelBorder(javax.swing.border.BevelBorder.RAISED)); jLabel4.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel4.setText("КВС ID"); tfKVS.setFont(new java.awt.Font("Tahoma", 0, 14)); tfVP.setFont(new java.awt.Font("Tahoma", 0, 14)); jLabel5.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel5.setText("ВП ID"); jLabel6.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel6.setText("Пилотирующий"); comboPF.setFont(new java.awt.Font("Tahoma", 0, 14)); comboPF.setModel(new javax.swing.DefaultComboBoxModel(new String[]{" ", "КВС", "2П"})); comboPF.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { comboPFItemStateChanged(evt); } }); comboAE.setFont(new java.awt.Font("Tahoma", 0, 14)); comboAE.setModel(new javax.swing.DefaultComboBoxModel(new String[]{" ", "2", "3", "4", "5", "6", "7", "8", "10", "11", "12"})); comboAE.addItemListener(new java.awt.event.ItemListener() { public void 
itemStateChanged(java.awt.event.ItemEvent evt) { comboAEItemStateChanged(evt); } }); jLabel7.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel7.setText("№АЭ"); comboNesActions.setFont(new java.awt.Font("Tahoma", 0, 14)); comboNesActions.setModel(new javax.swing.DefaultComboBoxModel(new String[]{" ", "<html><bgcolor = #8DFF41><b>Анализ в АЭ </b></bgcolor>", "<html><bgcolor = #EBFF14><b>Углубленный анализ в ЛО </b></bgcolor>", "<html><bgcolor = #FFAC05><b>Служебное расследование в ЛО</b></bgcolor>", "<html><bgcolor = red><b>Расследование в соответствие с ПРАПИ</b></bgcolor>", "Не учитывать", "Не действительно"})); comboNesActions.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { comboNesActionsItemStateChanged(evt); } }); jLabel8.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel8.setText("Необходимые мероприятия"); jLabel9.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel9.setText("Контроль"); comboControl.setFont(new java.awt.Font("Tahoma", 0, 14)); comboControl.setModel(new javax.swing.DefaultComboBoxModel(new String[]{" ", FD_CHEIF_ASSESSMENT, FD_SUBCHIEF_ASSESSMENT})); comboControl.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { comboControlItemStateChanged(evt); } }); dateCorDeadline.setFont(new java.awt.Font("Tahoma", 0, 14)); bSendToAE.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/sendToPilots.png"))); // NOI18N bSendToAE.setText("Отправить в АЭ"); bSendToAE.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bSendToAEActionPerformed(evt); } }); jLabel10.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel10.setText(" <NAME>"); bCrew.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/pilot.png"))); // NOI18N bCrew.setText("Выбрать экипаж"); bCrew.addActionListener(new java.awt.event.ActionListener() { public void 
actionPerformed(java.awt.event.ActionEvent evt) { bCrewActionPerformed(evt); } }); bInfoVp.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/info2.png"))); // NOI18N bInfoVp.setToolTipText("Информация по второму пилоту"); bInfoVp.setMaximumSize(new java.awt.Dimension(20, 20)); bInfoVp.setMinimumSize(new java.awt.Dimension(20, 20)); bInfoVp.setPreferredSize(new java.awt.Dimension(20, 20)); bInfoVp.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bInfoVpActionPerformed(evt); } }); bInfoKvs.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/info2.png"))); // NOI18N bInfoKvs.setToolTipText("Информация по командиру"); bInfoKvs.setMaximumSize(new java.awt.Dimension(20, 20)); bInfoKvs.setMinimumSize(new java.awt.Dimension(20, 20)); bInfoKvs.setPreferredSize(new java.awt.Dimension(20, 20)); bInfoKvs.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bInfoKvsActionPerformed(evt); } }); bFiles.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/files-folders.png"))); // NOI18N bFiles.setText("Материалы"); bFiles.setToolTipText("Открывает или создает папку на сервере для хранения материалов события"); bFiles.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bFilesActionPerformed(evt); } }); jLabel20.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel20.setText("Действия"); javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) 
.addGroup(jPanel2Layout.createSequentialGroup() .addComponent(bCrew) .addGap(696, 696, 696)) .addGroup(jPanel2Layout.createSequentialGroup() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(tfKVS, javax.swing.GroupLayout.DEFAULT_SIZE, 57, Short.MAX_VALUE) .addComponent(jLabel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bInfoKvs, javax.swing.GroupLayout.PREFERRED_SIZE, 20, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel5, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(tfVP, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 47, Short.MAX_VALUE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bInfoVp, javax.swing.GroupLayout.PREFERRED_SIZE, 20, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(comboPF, 0, 131, Short.MAX_VALUE) .addComponent(jLabel6, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(comboAE, javax.swing.GroupLayout.PREFERRED_SIZE, 49, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel7, javax.swing.GroupLayout.PREFERRED_SIZE, 45, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) 
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(comboNesActions, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel8, javax.swing.GroupLayout.PREFERRED_SIZE, 248, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel9) .addComponent(comboControl, 0, 79, Short.MAX_VALUE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel10) .addGap(63, 63, 63)) .addComponent(dateCorDeadline, javax.swing.GroupLayout.PREFERRED_SIZE, 137, javax.swing.GroupLayout.PREFERRED_SIZE)))) .addGap(6, 6, 6) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(jLabel20, javax.swing.GroupLayout.Alignment.LEADING) .addComponent(bFiles, javax.swing.GroupLayout.PREFERRED_SIZE, 135, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(bSendToAE)) .addContainerGap()) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(18, 18, 18) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel10) .addGap(18, 18, 18) .addComponent(dateCorDeadline, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addComponent(bInfoVp, javax.swing.GroupLayout.PREFERRED_SIZE, 20, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(jPanel2Layout.createSequentialGroup() 
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(jLabel5) .addComponent(jLabel6) .addComponent(jLabel20) .addComponent(jLabel9) .addComponent(jLabel8) .addComponent(jLabel7)) .addGap(18, 18, 18) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(comboPF, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(comboNesActions, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(comboControl, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(comboAE, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addComponent(tfKVS, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(tfVP, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(bSendToAE))) .addComponent(bInfoKvs, javax.swing.GroupLayout.PREFERRED_SIZE, 20, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 32, Short.MAX_VALUE) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(bCrew) .addComponent(bFiles)) .addContainerGap()) ); jTabbedPane1.addTab("Отправка в авиаэскадрилью", jPanel2); jLabel15.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel15.setText("Для информации в авиаэскадрилью о данном событии"); jLabel15.setToolTipText("Для отправки сообщения в авиаэскадрилью, просто заполните поле ниже и нажмите кнопку \"Подтвердить\". 
В эскадрильи смогут увидить причину, почему корректирующие действия по событию не были, например, приняты."); tpInfoFromGLazkov.setFont(new java.awt.Font("Tahoma", 0, 14)); jScrollPane5.setViewportView(tpInfoFromGLazkov); jLabel16.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel16.setText("Оценка корректирующих действий"); buttonGroup1.add(rbNotCA); rbNotCA.setText("Не одобрены"); rbNotCA.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { rbNotCAItemStateChanged(evt); } }); rbNotCA.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { rbNotCAActionPerformed(evt); } }); buttonGroup1.add(rbGlazkov); rbGlazkov.setText("<NAME>"); rbGlazkov.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { rbGlazkovItemStateChanged(evt); } }); buttonGroup1.add(rbRedkin); rbRedkin.setText("<NAME>"); rbRedkin.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { rbRedkinItemStateChanged(evt); } }); bAcceptCorAct.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/OK.png"))); // NOI18N bAcceptCorAct.setText("Подтвердить"); bAcceptCorAct.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bAcceptCorActActionPerformed(evt); } }); bCorrActGreen.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/green.png"))); // NOI18N bCorrActGreen.setText("Присвоить низкий"); bCorrActGreen.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bCorrActGreenActionPerformed(evt); } }); jPanel9.setBorder(javax.swing.BorderFactory.createEtchedBorder()); bMatrixFD.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/matrix.png"))); // NOI18N bMatrixFD.setText("Матрица риска"); 
bMatrixFD.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bMatrixFDActionPerformed(evt); } }); javax.swing.GroupLayout jPanel9Layout = new javax.swing.GroupLayout(jPanel9); jPanel9.setLayout(jPanel9Layout); jPanel9Layout.setHorizontalGroup( jPanel9Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel9Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel9Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel9Layout.createSequentialGroup() .addComponent(tfIndexFD, javax.swing.GroupLayout.PREFERRED_SIZE, 15, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bMatrixFD)) .addComponent(tfRiskIndexFD, javax.swing.GroupLayout.DEFAULT_SIZE, 260, Short.MAX_VALUE)) .addContainerGap()) ); jPanel9Layout.setVerticalGroup( jPanel9Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel9Layout.createSequentialGroup() .addContainerGap() .addComponent(tfRiskIndexFD, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(15, 15, 15) .addGroup(jPanel9Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(bMatrixFD, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(tfIndexFD, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap()) ); javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3); jPanel3.setLayout(jPanel3Layout); jPanel3Layout.setHorizontalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() 
.addContainerGap() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jScrollPane5, javax.swing.GroupLayout.DEFAULT_SIZE, 468, Short.MAX_VALUE) .addComponent(jLabel15, javax.swing.GroupLayout.DEFAULT_SIZE, 468, Short.MAX_VALUE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addGroup(jPanel3Layout.createSequentialGroup() .addComponent(jPanel9, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGap(18, 18, 18) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup() .addComponent(rbNotCA) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(rbRedkin) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(rbGlazkov)) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup() .addComponent(bAcceptCorAct) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(bCorrActGreen)))) .addComponent(jLabel16)) .addContainerGap()) ); jPanel3Layout.setVerticalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel15) .addComponent(jLabel16)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addGap(20, 20, 20) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(rbGlazkov) 
.addComponent(rbRedkin) .addComponent(rbNotCA)) .addGap(12, 12, 12) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(bCorrActGreen) .addComponent(bAcceptCorAct))) .addComponent(jScrollPane5, javax.swing.GroupLayout.DEFAULT_SIZE, 94, Short.MAX_VALUE) .addComponent(jPanel9, javax.swing.GroupLayout.DEFAULT_SIZE, 94, Short.MAX_VALUE)) .addContainerGap()) ); jTabbedPane1.addTab("Оценка корректирующих действий", jPanel3); jPanel6.setBorder(javax.swing.BorderFactory.createEtchedBorder()); jLabel18.setFont(new java.awt.Font("Tahoma", 1, 16)); jLabel18.setText("Окончательная оценка риска и закрытие талона"); bMatrix.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/matrix.png"))); // NOI18N bMatrix.setText("Матрица риска"); bMatrix.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bMatrixActionPerformed(evt); } }); jPanel7.setBorder(javax.swing.BorderFactory.createEtchedBorder()); buttonGroup2.add(rbZubkov); rbZubkov.setText("<NAME>. 
ИБП"); rbZubkov.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { rbZubkovItemStateChanged(evt); } }); buttonGroup2.add(rbNotClosure); rbNotClosure.setText("Не закрыт"); rbNotClosure.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { rbNotClosureItemStateChanged(evt); } }); bAcceptClosure.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/OK.png"))); // NOI18N bAcceptClosure.setText("Подтвердить"); bAcceptClosure.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bAcceptClosureActionPerformed(evt); } }); buttonGroup2.add(rbChebrov); rbChebrov.setText("Начальник ИБП"); rbChebrov.addItemListener(new java.awt.event.ItemListener() { public void itemStateChanged(java.awt.event.ItemEvent evt) { rbChebrovItemStateChanged(evt); } }); javax.swing.GroupLayout jPanel7Layout = new javax.swing.GroupLayout(jPanel7); jPanel7.setLayout(jPanel7Layout); jPanel7Layout.setHorizontalGroup( jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel7Layout.createSequentialGroup() .addContainerGap(31, Short.MAX_VALUE) .addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel7Layout.createSequentialGroup() .addComponent(rbNotClosure) .addGap(18, 18, 18) .addComponent(rbZubkov) .addGap(18, 18, 18) .addComponent(rbChebrov) .addGap(18, 18, 18)) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel7Layout.createSequentialGroup() .addComponent(bAcceptClosure) .addContainerGap()))) ); jPanel7Layout.setVerticalGroup( jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel7Layout.createSequentialGroup() .addContainerGap(17, Short.MAX_VALUE) 
.addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(rbChebrov) .addComponent(rbZubkov) .addComponent(rbNotClosure)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(bAcceptClosure) .addGap(6, 6, 6)) ); javax.swing.GroupLayout jPanel6Layout = new javax.swing.GroupLayout(jPanel6); jPanel6.setLayout(jPanel6Layout); jPanel6Layout.setHorizontalGroup( jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel6Layout.createSequentialGroup() .addContainerGap(16, Short.MAX_VALUE) .addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addGroup(jPanel6Layout.createSequentialGroup() .addComponent(tfIndex, javax.swing.GroupLayout.PREFERRED_SIZE, 49, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(164, 164, 164) .addComponent(tfRiskIndex, javax.swing.GroupLayout.PREFERRED_SIZE, 250, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(10, 10, 10) .addComponent(bMatrix) .addGap(6, 6, 6)) .addComponent(jLabel18)) .addGap(46, 46, 46) .addComponent(jPanel7, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap()) ); jPanel6Layout.setVerticalGroup( jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel6Layout.createSequentialGroup() .addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel6Layout.createSequentialGroup() .addGap(57, 57, 57) .addComponent(tfIndex, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(jPanel6Layout.createSequentialGroup() .addGap(16, 16, 16) .addComponent(jLabel18) .addGap(18, 18, 18) .addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(bMatrix) .addComponent(tfRiskIndex, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addGroup(jPanel6Layout.createSequentialGroup() .addContainerGap() .addComponent(jPanel7, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) .addContainerGap()) ); javax.swing.GroupLayout jPanel5Layout = new javax.swing.GroupLayout(jPanel5); jPanel5.setLayout(jPanel5Layout); jPanel5Layout.setHorizontalGroup( jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel5Layout.createSequentialGroup() .addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel5Layout.createSequentialGroup() .addGap(467, 467, 467) .addComponent(jLabel22)) .addGroup(jPanel5Layout.createSequentialGroup() .addContainerGap() .addComponent(jPanel6, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) .addContainerGap()) ); jPanel5Layout.setVerticalGroup( jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel5Layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jLabel22) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jPanel6, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(53, 53, 53)) ); jTabbedPane1.addTab("Закрытие талона", jPanel5); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addGroup(layout.createSequentialGroup() .addGap(10, 10, 10) .addComponent(jTabbedPane1, 0, 0, Short.MAX_VALUE)) 
.addComponent(jPanel1, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(jTabbedPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 176, javax.swing.GroupLayout.PREFERRED_SIZE)) ); pack(); }// </editor-fold>//GEN-END:initComponents private void tfEvent4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_tfEvent4ActionPerformed // TODO удалить этот метод }//GEN-LAST:event_tfEvent4ActionPerformed private void bMatrixActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bMatrixActionPerformed // кнопка вызова матрицы MatrixWindow matrix = new MatrixWindow(); matrix.setParent(this); Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); int locX = (screenSize.width - 900) / 2; int locY = (screenSize.height - 630) / 2; matrix.setLocation(locX, locY); matrix.setVisible(true); }//GEN-LAST:event_bMatrixActionPerformed private void rbNotCAItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_rbNotCAItemStateChanged // изменения фона и текста кнопки "не закрыто" в ЛО если выбрана if (evt.getStateChange() == ItemEvent.SELECTED) { rbNotCA.setForeground(Color.red); rbNotCA.setFont(rbNotCA.getFont().deriveFont(Font.BOLD | Font.ITALIC)); corActFIO = ""; } else if (evt.getStateChange() == ItemEvent.DESELECTED) { rbNotCA.setForeground(jPanel5.getForeground()); rbNotCA.setFont(rbNotClosure.getFont().deriveFont(Font.PLAIN)); } }//GEN-LAST:event_rbNotCAItemStateChanged private void rbRedkinItemStateChanged(java.awt.event.ItemEvent evt) 
{//GEN-FIRST:event_rbRedkinItemStateChanged // изменения фона и текста кнопки "Зам КЛО" в ЛО если выбрана if (evt.getStateChange() == ItemEvent.SELECTED) { rbRedkin.setForeground(Color.BLUE); rbRedkin.setFont(rbRedkin.getFont().deriveFont(Font.BOLD | Font.ITALIC)); corActFIO = FD_SUBCHIEF_ASSESSMENT; } else if (evt.getStateChange() == ItemEvent.DESELECTED) { rbRedkin.setForeground(jPanel5.getForeground()); rbRedkin.setFont(rbRedkin.getFont().deriveFont(Font.PLAIN)); } }//GEN-LAST:event_rbRedkinItemStateChanged private void rbGlazkovItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_rbGlazkovItemStateChanged // изменения фона и текста кнопки "<NAME>О" в ЛО если выбрана if (evt.getStateChange() == ItemEvent.SELECTED) { rbGlazkov.setForeground(Color.BLUE); rbGlazkov.setFont(rbGlazkov.getFont().deriveFont(Font.BOLD | Font.ITALIC)); corActFIO = FD_CHEIF_ASSESSMENT; } else if (evt.getStateChange() == ItemEvent.DESELECTED) { rbGlazkov.setForeground(jPanel5.getForeground()); rbGlazkov.setFont(rbGlazkov.getFont().deriveFont(Font.PLAIN)); } }//GEN-LAST:event_rbGlazkovItemStateChanged private void bSendToAEActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bSendToAEActionPerformed // Отправка в авиаэскадрилью - кнопка отправить if (dateCorDeadline.getDate() == null) { JOptionPane.showMessageDialog(null, "Срок КД не заполнен!"); } else { //подготавливаем стоки, где не должно быть одинарных ковычек при помощи регулярный выражений String kvs = tfKVS.getText(); String kvsPrepared = kvs.replaceAll("\\'", ""); String vp = tfVP.getText(); String vpPrepared = vp.replaceAll("\\'", ""); try { // Кнопка отправить ответ if (new DBUpdaterWorkingWindow().update(globalIncNbr, kvsPrepared, vpPrepared, comboPF.getSelectedItem().toString(), comboAE.getSelectedItem().toString(), comboNesActions.getSelectedItem().toString(), comboControl.getSelectedItem().toString(), dateCorDeadline.getDate())) { JOptionPane.showMessageDialog(null, "Событие успешно передано в 
авиаэскадрилью № " + comboAE.getSelectedItem().toString()); this.parent.getConcoleText(" >> Событие номер " + insideStr + " было передано в авиаэскадрилью № " + comboAE.getSelectedItem().toString()); dispose(); } else { JOptionPane.showMessageDialog(null, "Не получилось сохранить, пожалуйста, проинформируйте разработчка об ошибке! Метод bSendToAEActionPerformed класса WorkingWindow3"); } } catch (ParseException ex) { Logger.getLogger(WorkingWindow3.class.getName()).log(Level.SEVERE, null, ex); } } }//GEN-LAST:event_bSendToAEActionPerformed private void bAcceptCorActActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bAcceptCorActActionPerformed // Оценка корректирующих действий - работа кнопки "подтвердить" String indexIncoming; int index = 0; if ((tfRiskIndexFD.getText().isEmpty() & rbRedkin.isSelected() == true) || (tfRiskIndexFD.getText().isEmpty() & rbGlazkov.isSelected() == true)) { JOptionPane.showMessageDialog(null, "Пожалуйста, выберите степень риска!"); } else if (tfRiskIndexFD.getText().contains("риск") & rbRedkin.isSelected() == false & rbGlazkov.isSelected() == false) { JOptionPane.showMessageDialog(null, "Если выбрана степень риска, то пожалуйста, выберите должность лица, производящего оценку корректирующих действий. 
Либо измените поле с оценкой риска"); } else { // если пользователь все заполнил - закрываем талон, отправляем данные в БД try { if (tfIndexFD.getText().length() != 0) { indexIncoming = tfIndexFD.getText(); index = Integer.parseInt(indexIncoming); } //если индекс риска 10 и менее - сразу закрываем талон if ((index <= 10 & rbRedkin.isSelected() == true) || (index <= 10 & rbGlazkov.isSelected() == true)) { String talonClosingToken = <PASSWORD>; if (new DBUpdaterWorkingWindow().update2pageWithClosure(globalIncNbr, tpInfoFromGLazkov.getText(), index, corActFIO, talonClosingToken)) { JOptionPane.showMessageDialog(null, "Информация успешно добавлена в базу данных"); if (rbRedkin.isSelected() == true || rbGlazkov.isSelected() == true) { this.parent.getConcoleText(" >> Корректирующие дейcтвия по событию номер " + insideStr + " были одобренны. Событие получило статус <КД одобрены> в базе."); dispose(); } else { this.parent.getConcoleText(" >> Ответ по событию номер " + insideStr + " был передан в авиаэскадрилью № " + comboAE.getSelectedItem().toString() + " Событие имеет статус <КД не одобрены>"); dispose(); } } else { JOptionPane.showMessageDialog(null, "Не получилось сохранить запись, пожалуйста, проверьте подключение к базе данных!"); } //иначе выставляем риск и оставляет талон закрытм } else { if (new DBUpdaterWorkingWindow().update2page(globalIncNbr, tpInfoFromGLazkov.getText(), index, corActFIO)) { JOptionPane.showMessageDialog(null, "Информация успешно добавлена в базу данных"); if (rbRedkin.isSelected() == true || rbGlazkov.isSelected() == true) { this.parent.getConcoleText(" >> Корректирующие дейcтвия по событию номер " + insideStr + " были одобренны. 
Событие получило статус <КД одобрены> в базе."); dispose(); } else { this.parent.getConcoleText(" >> Ответ по событию номер " + insideStr + " был передан в авиаэскадрилью № " + comboAE.getSelectedItem().toString() + " Событие имеет статус <КД не одобрены>"); dispose(); } } else { JOptionPane.showMessageDialog(null, "Не получилось сохранить запись, пожалуйста, проверьте подключение к базе данных!"); } } } catch (ParseException ex) { Logger.getLogger(WorkingWindow3.class.getName()).log(Level.SEVERE, null, ex); } } }//GEN-LAST:event_bAcceptCorActActionPerformed private void rbNotCAActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_rbNotCAActionPerformed // TODO удалить метод }//GEN-LAST:event_rbNotCAActionPerformed private void bCorrActGreenActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bCorrActGreenActionPerformed // Оценка корректирующих действий - работа кнопки "быстро зеленый" if (rbRedkin.isSelected() == false & rbGlazkov.isSelected() == false) { JOptionPane.showMessageDialog(null, "Пожалуйста, выберите должность закрывающего, для быстрой оценки корректирующих действий"); } else { try { // Кнопка отправить ответ String talonClosingToken = <PASSWORD>; int index = 2; if (new DBUpdaterWorkingWindow().update2pageWithClosure(globalIncNbr, tpInfoFromGLazkov.getText(), index, corActFIO, talonClosingToken)) { JOptionPane.showMessageDialog(null, "Информация успешно добавлена в базу данных"); if (rbRedkin.isSelected() == true || rbGlazkov.isSelected() == true) { this.parent.getConcoleText(" >> Корректирующие дейcтвия по событию номер " + insideStr + " были одобренны. 
Событие получило статус <КД одобрены> в базе."); dispose(); } } else { JOptionPane.showMessageDialog(null, "Не получилось сохранить запись, пожалуйста, проверьте подключение к базе данных!"); } } catch (ParseException ex) { Logger.getLogger(WorkingWindow3.class.getName()).log(Level.SEVERE, null, ex); } } }//GEN-LAST:event_bCorrActGreenActionPerformed private void bCrewActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bCrewActionPerformed // кнопка вызова справочника персонала Personnel pers = new Personnel(); pers.setParent(this); pers.setVisible(true); }//GEN-LAST:event_bCrewActionPerformed private void bInfoKvsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bInfoKvsActionPerformed // кнопка информация по командиру if (tfKVS.getText().equals("") || tfKVS.getText().equals(" ") || tfKVS.getText().equals(" ") || tfKVS.getText() == null) { } else { String ID = tfKVS.getText(); Object[] infoData = new PersonnelDBUpdater().getInfoKVS(ID); if (infoData == null) { JOptionPane.showMessageDialog(null, "Сотрудник с ID: " + ID + " не найден в перечне персонала ЛО"); } else { String name, ae, position; if (infoData[0] == null) { name = ""; } else { name = infoData[0].toString(); } if (infoData[1] == null) { ae = ""; } else { ae = infoData[1].toString(); } if (infoData[2] == null) { position = ""; } else { position = infoData[2].toString(); } JOptionPane.showMessageDialog(null, "ФИО: " + name + " \nАвиаэскадрилья: " + ae + "\nДолжность: " + position); } } }//GEN-LAST:event_bInfoKvsActionPerformed private void bInfoVpActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bInfoVpActionPerformed // кнопка информация по второму пилоту if (tfVP.getText().equals("") || tfVP.getText().equals(" ") || tfVP.getText().equals(" ") || tfVP.getText() == null) { } else { String ID = tfVP.getText(); Object[] infoData = new PersonnelDBUpdater().getInfoKVS(ID); if (infoData == null) { JOptionPane.showMessageDialog(null, "Сотрудник с ID: " + ID + " 
не найден в перечне персонала ЛО"); } else { String name, ae, position; if (infoData[0] == null) { name = ""; } else { name = infoData[0].toString(); } if (infoData[1] == null) { ae = ""; } else { ae = infoData[1].toString(); } if (infoData[2] == null) { position = ""; } else { position = infoData[2].toString(); } JOptionPane.showMessageDialog(null, "ФИО: " + name + " \nАвиаэскадрилья: " + ae + "\nДолжность: " + position); } } }//GEN-LAST:event_bInfoVpActionPerformed private void openFile() { //метод открывает файл с нстройками рабочего окна try { File file = new File("./lib/exchange.properties").getAbsoluteFile(); scan = new Scanner(file); } catch (FileNotFoundException ex) { JOptionPane.showMessageDialog(null, "Файл exchange.properties не найден"); } } private String readFile() { String folderNameOnServer = scan.next(); return folderNameOnServer; } private void bFilesActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bFilesActionPerformed String fileFolder = Main.netFolderPath + folderName; File f = new File(fileFolder); if (f.exists()) { try { Runtime.getRuntime().exec("explorer " + fileFolder); } catch (IOException ex) { Logger.getLogger(WorkingWindow3.class.getName()).log(Level.SEVERE, null, ex); } } else { new File(fileFolder).mkdir(); try { Runtime.getRuntime().exec("explorer " + fileFolder); } catch (IOException ex) { Logger.getLogger(WorkingWindow3.class.getName()).log(Level.SEVERE, null, ex); } } }//GEN-LAST:event_bFilesActionPerformed private void rbZubkovItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_rbZubkovItemStateChanged // изменения фона и текста кнопки "зам нач ИБП" в ИБП если выбрана if (evt.getStateChange() == ItemEvent.SELECTED) { rbZubkov.setForeground(Color.BLUE); rbZubkov.setFont(rbZubkov.getFont().deriveFont(Font.BOLD | Font.ITALIC)); talonCloseFIO = "Зубков О.В."; } else if (evt.getStateChange() == ItemEvent.DESELECTED) { rbZubkov.setForeground(jPanel5.getForeground()); 
rbZubkov.setFont(rbZubkov.getFont().deriveFont(Font.PLAIN)); } }//GEN-LAST:event_rbZubkovItemStateChanged private void rbChebrovItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_rbChebrovItemStateChanged // изменения фона и текста кнопки "нач ИБП" в ИБП если выбрана if (evt.getStateChange() == ItemEvent.SELECTED) { rbChebrov.setForeground(Color.BLUE); rbChebrov.setFont(rbChebrov.getFont().deriveFont(Font.BOLD | Font.ITALIC)); talonCloseFIO = SAFETY_MANAGER_ASSESSMENT; } else if (evt.getStateChange() == ItemEvent.DESELECTED) { rbChebrov.setForeground(jPanel5.getForeground()); rbChebrov.setFont(rbChebrov.getFont().deriveFont(Font.PLAIN)); } }//GEN-LAST:event_rbChebrovItemStateChanged private void bAcceptClosureActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bAcceptClosureActionPerformed // Закрытие талона - работа кнопки "подтвердить" String indexIncoming; int index = 0; if ((tfRiskIndex.getText().isEmpty() & rbZubkov.isSelected() == true) || (tfRiskIndex.getText().isEmpty() & rbChebrov.isSelected() == true)) { JOptionPane.showMessageDialog(null, "Пожалуйста, выберите степень риска!"); } else if (rbNotClosure.isSelected() == true) { try { if (tfIndex.getText().length() != 0) { indexIncoming = tfIndex.getText(); index = Integer.parseInt(indexIncoming); } // Кнопка отправить ответ talonCloseFIO = ""; if (new DBUpdaterWorkingWindow().update3page(globalIncNbr, index, talonCloseFIO)) { JOptionPane.showMessageDialog(null, "Талон получил статус <не закрыт>"); this.parent.getConcoleText(" >> Талон по событию номер " + insideStr + " получил статус <не закрыт>"); dispose(); } else { JOptionPane.showMessageDialog(null, "Не получилось сохранить, пожалуйста, проинформируйте разработчка об ошибке! 
Метод bAcceptClosureActionPerformed класса WorkingWindow3"); } } catch (ParseException ex) { Logger.getLogger(WorkingWindow3.class.getName()).log(Level.SEVERE, null, ex); } } else { try { if (tfIndex.getText().length() != 0) { indexIncoming = tfIndex.getText(); index = Integer.parseInt(indexIncoming); } // Кнопка отправить ответ if (new DBUpdaterWorkingWindow().update3page(globalIncNbr, index, talonCloseFIO)) { JOptionPane.showMessageDialog(null, "Информация успешно добавлена в базу данных"); if (rbZubkov.isSelected() == true || rbChebrov.isSelected() == true) { //newString = makeParsing(); this.parent.getConcoleText(" >> Талон по событию номер " + insideStr + " был <закрыт>."); dispose(); } else { this.parent.getConcoleText(" >> Талон по событию номер " + insideStr + " получил статус <не закрыт>"); dispose(); } } else { JOptionPane.showMessageDialog(null, "Не получилось сохранить, пожалуйста, проинформируйте разработчка об ошибке! Метод bAcceptClosureActionPerformed класса WorkingWindow3"); } } catch (ParseException ex) { Logger.getLogger(WorkingWindow3.class.getName()).log(Level.SEVERE, null, ex); } } }//GEN-LAST:event_bAcceptClosureActionPerformed private void rbNotClosureItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_rbNotClosureItemStateChanged // изменения фона и текста кнопки "не закрыто" в ИБП если выбрана if (evt.getStateChange() == ItemEvent.SELECTED) { rbNotClosure.setForeground(Color.red); rbNotClosure.setFont(rbNotClosure.getFont().deriveFont(Font.BOLD | Font.ITALIC)); talonCloseFIO = ""; } else if (evt.getStateChange() == ItemEvent.DESELECTED) { rbNotClosure.setForeground(jPanel5.getForeground()); rbNotClosure.setFont(rbNotClosure.getFont().deriveFont(Font.PLAIN)); } }//GEN-LAST:event_rbNotClosureItemStateChanged private void formWindowClosing(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowClosing boolean docChange = MyDocumentListener.isDocumentChanged(); if (docChange == true || comboChanged == true) { Object[] 
options = {"Не сохранять", "Отмена"}; int status = JOptionPane.showOptionDialog(evt.getWindow(), "Вы действительно хотите закрыть окно? Все несохраненные данные - будут утеряны!", "Закрыть окно", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0]); if (status == 0) { setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE); dispose(); } else { setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); evt.getWindow().setVisible(true); } } }//GEN-LAST:event_formWindowClosing private void comboPFItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_comboPFItemStateChanged comboChanged = true; }//GEN-LAST:event_comboPFItemStateChanged private void comboAEItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_comboAEItemStateChanged comboChanged = true; }//GEN-LAST:event_comboAEItemStateChanged private void comboNesActionsItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_comboNesActionsItemStateChanged comboChanged = true; }//GEN-LAST:event_comboNesActionsItemStateChanged private void comboControlItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_comboControlItemStateChanged comboChanged = true; }//GEN-LAST:event_comboControlItemStateChanged private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed try { // Кнопка отправить ответ if (new DBUpdaterWorkingWindow().updateAnalysis(globalIncNbr, tpAnalysis.getText(), tpCorrectiveActions.getText())) { JOptionPane.showMessageDialog(null, "Анализ и корректирующие действия были успешно изменены!"); } else { JOptionPane.showMessageDialog(null, "Не получилось сохранить! 
Пожалуйста, проверьте доступность подключения к серверу"); } } catch (ParseException ex) { Logger.getLogger(WorkingWindow3.class.getName()).log(Level.SEVERE, null, ex); } }//GEN-LAST:event_jButton1ActionPerformed private void bMatrixFDActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bMatrixFDActionPerformed MatrixWindowFD matrixFD = new MatrixWindowFD(); matrixFD.setParent(this); Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); int locX = (screenSize.width - 900) / 2; int locY = (screenSize.height - 630) / 2; matrixFD.setLocation(locX, locY); matrixFD.setVisible(true); }//GEN-LAST:event_bMatrixFDActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton bAcceptClosure; private javax.swing.JButton bAcceptCorAct; private javax.swing.JButton bCorrActGreen; private javax.swing.JButton bCrew; private javax.swing.JButton bFiles; private javax.swing.JButton bInfoKvs; private javax.swing.JButton bInfoVp; private javax.swing.JButton bMatrix; private javax.swing.JButton bMatrixFD; private javax.swing.JButton bSendToAE; private javax.swing.ButtonGroup buttonGroup1; private javax.swing.ButtonGroup buttonGroup2; private javax.swing.JComboBox comboAE; private javax.swing.JComboBox comboControl; private javax.swing.JComboBox comboNesActions; private javax.swing.JComboBox comboPF; private com.toedter.calendar.JDateChooser dateCorDeadline; private javax.swing.JButton jButton1; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel10; private javax.swing.JLabel jLabel11; private javax.swing.JLabel jLabel12; private javax.swing.JLabel jLabel13; private javax.swing.JLabel jLabel14; private javax.swing.JLabel jLabel15; private javax.swing.JLabel jLabel16; private javax.swing.JLabel jLabel18; private javax.swing.JLabel jLabel19; private javax.swing.JLabel jLabel2; private javax.swing.JLabel jLabel20; private javax.swing.JLabel jLabel22; private javax.swing.JLabel jLabel3; private 
javax.swing.JLabel jLabel4; private javax.swing.JLabel jLabel5; private javax.swing.JLabel jLabel6; private javax.swing.JLabel jLabel7; private javax.swing.JLabel jLabel8; private javax.swing.JLabel jLabel9; private javax.swing.JPanel jPanel1; private javax.swing.JPanel jPanel2; private javax.swing.JPanel jPanel3; private javax.swing.JPanel jPanel4; private javax.swing.JPanel jPanel5; private javax.swing.JPanel jPanel6; private javax.swing.JPanel jPanel7; private javax.swing.JPanel jPanel8; private javax.swing.JPanel jPanel9; private javax.swing.JScrollPane jScrollPane1; private javax.swing.JScrollPane jScrollPane2; private javax.swing.JScrollPane jScrollPane3; private javax.swing.JScrollPane jScrollPane4; private javax.swing.JScrollPane jScrollPane5; private javax.swing.JTabbedPane jTabbedPane1; private javax.swing.JRadioButton rbChebrov; private javax.swing.JRadioButton rbGlazkov; private javax.swing.JRadioButton rbNotCA; private javax.swing.JRadioButton rbNotClosure; private javax.swing.JRadioButton rbRedkin; private javax.swing.JRadioButton rbZubkov; private javax.swing.JTable tableGeneralInfo; private javax.swing.JTextField tfAirFASE1; private javax.swing.JTextField tfAirFASE2; private javax.swing.JTextField tfAirFASE3; private javax.swing.JTextField tfAirFASE4; private javax.swing.JTextField tfAnalysisDate; private javax.swing.JTextField tfAss1; private javax.swing.JTextField tfAss2; private javax.swing.JTextField tfAss3; private javax.swing.JTextField tfAss4; private javax.swing.JTextField tfCommanderAE; private javax.swing.JTextField tfEvent1; private javax.swing.JTextField tfEvent2; private javax.swing.JTextField tfEvent3; private javax.swing.JTextField tfEvent4; private javax.swing.JTextField tfIndex; private javax.swing.JTextField tfIndexFD; private javax.swing.JTextField tfKVS; private javax.swing.JTextField tfRiskIndex; private javax.swing.JTextField tfRiskIndexFD; private javax.swing.JTextField tfVP; private javax.swing.JTextPane tpAnalysis; private 
javax.swing.JTextPane tpCorrectiveActions; private javax.swing.JTextPane tpInfoFromGLazkov; private javax.swing.JTextPane tpPrim; // End of variables declaration//GEN-END:variables }
syrykh/python-for-android
tests/recipes/test_libmysqlclient.py
# <filename>tests/recipes/test_libmysqlclient.py
import unittest

from unittest import mock

from tests.recipes.recipe_lib_test import BaseTestForCmakeRecipe


class TestLibmysqlclientRecipe(BaseTestForCmakeRecipe, unittest.TestCase):
    """
    An unittest for recipe :mod:`~pythonforandroid.recipes.libmysqlclient`

    Inherits the generic CMake-recipe build test from
    :class:`BaseTestForCmakeRecipe` and only extends it with the extra
    mocks this recipe needs (`sh.cp` / `sh.rm`).
    """

    # Name used by the base class to look up the recipe under test.
    recipe_name = "libmysqlclient"

    # NOTE: `mock.patch` decorators are applied bottom-up, so the mock for
    # the BOTTOM decorator (`find_executable`) is bound to the FIRST mock
    # parameter below, and the TOP decorator (`sh.rm`) to the LAST.
    # Keep decorator order and parameter order in sync when editing.
    @mock.patch("pythonforandroid.recipes.libmysqlclient.sh.rm")
    @mock.patch("pythonforandroid.recipes.libmysqlclient.sh.cp")
    @mock.patch("pythonforandroid.util.chdir")
    @mock.patch("pythonforandroid.build.ensure_dir")
    @mock.patch("pythonforandroid.archs.glob")
    @mock.patch("pythonforandroid.archs.find_executable")
    def test_build_arch(
        self,
        mock_find_executable,
        mock_glob,
        mock_ensure_dir,
        mock_current_directory,
        mock_sh_cp,
        mock_sh_rm,
    ):
        # We overwrite the base test method because we need
        # to mock a little more (`sh.cp` and `sh.rm`)
        super().test_build_arch()
        # make sure that the mocked methods are actually called
        mock_sh_cp.assert_called()
        mock_sh_rm.assert_called()
as1000/genome-nexus
service/src/test/java/org/cbioportal/genome_nexus/component/annotation/VariantClassificationResolverTest.java
package org.cbioportal.genome_nexus.component.annotation; import org.cbioportal.genome_nexus.model.VariantAnnotation; import org.cbioportal.genome_nexus.service.mock.CanonicalTranscriptResolverMocker; import org.cbioportal.genome_nexus.service.mock.VariantAnnotationMockData; import org.cbioportal.genome_nexus.service.mock.VariantTypeResolverMocker; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Spy; import org.mockito.junit.MockitoJUnitRunner; import java.io.IOException; import java.util.Map; import static org.junit.Assert.assertEquals; @RunWith(MockitoJUnitRunner.Silent.class) public class VariantClassificationResolverTest { @InjectMocks private VariantClassificationResolver variantClassificationResolver; @Mock private CanonicalTranscriptResolver canonicalTranscriptResolver; @Mock private VariantTypeResolver variantTypeResolver; @Spy private TranscriptConsequencePrioritizer consequencePrioritizer; @Spy private GenomicLocationResolver genomicLocationResolver; private final VariantAnnotationMockData variantAnnotationMockData = new VariantAnnotationMockData(); private final CanonicalTranscriptResolverMocker canonicalTranscriptResolverMocker = new CanonicalTranscriptResolverMocker(); private final VariantTypeResolverMocker variantTypeResolverMocker = new VariantTypeResolverMocker(); @Test public void resolveVariantClassificationForCanonical() throws IOException { Map<String, VariantAnnotation> variantMockData = this.variantAnnotationMockData.generateData(); this.canonicalTranscriptResolverMocker.mockMethods(variantMockData, this.canonicalTranscriptResolver); this.variantTypeResolverMocker.mockMethods(variantMockData, this.variantTypeResolver); assertEquals( "Frame_Shift_Ins", this.resolveForCanonical(variantMockData.get("1:g.65325832_65325833insG")) ); assertEquals( "In_Frame_Del", this.resolveForCanonical(variantMockData.get("3:g.14106026_14106037del")) ); assertEquals( 
"In_Frame_Ins", this.resolveForCanonical(variantMockData.get("3:g.14940279_14940280insCAT")) ); assertEquals( "Frame_Shift_Del", this.resolveForCanonical(variantMockData.get("3:g.114058003del")) ); assertEquals( "In_Frame_Ins", this.resolveForCanonical(variantMockData.get("4:g.9784947_9784948insAGA")) ); assertEquals( "Frame_Shift_Ins", this.resolveForCanonical(variantMockData.get("4:g.77675978_77675979insC")) ); assertEquals( "Frame_Shift_Del", this.resolveForCanonical(variantMockData.get("6:g.137519505_137519506del")) ); assertEquals( "Frame_Shift_Del", this.resolveForCanonical(variantMockData.get("6:g.137519505_137519506delinsA")) ); assertEquals( "Missense_Mutation", this.resolveForCanonical(variantMockData.get("7:g.140453136A>T")) ); assertEquals( "Frame_Shift_Ins", this.resolveForCanonical(variantMockData.get("8:g.37696499_37696500insG")) ); assertEquals( "Frame_Shift_Ins", this.resolveForCanonical(variantMockData.get("9:g.135797242delinsAT")) ); assertEquals( "Frame_Shift_Del", this.resolveForCanonical(variantMockData.get("10:g.101953779del")) ); assertEquals( "Nonsense_Mutation", this.resolveForCanonical(variantMockData.get("11:g.62393546_62393547delinsAA")) ); assertEquals( "Missense_Mutation", this.resolveForCanonical(variantMockData.get("12:g.25398285C>A")) ); assertEquals( "In_Frame_Del", this.resolveForCanonical(variantMockData.get("13:g.28608258_28608275del")) ); assertEquals( "In_Frame_Ins", this.resolveForCanonical(variantMockData.get("16:g.9057113_9057114insCTG")) ); assertEquals( "Splice_Site", this.resolveForCanonical(variantMockData.get("19:g.46141892_46141893delinsAA")) ); assertEquals( "Missense_Mutation", this.resolveForCanonical(variantMockData.get("22:g.29091840_29091841delinsCA")) ); assertEquals( "In_Frame_Del", this.resolveForCanonical(variantMockData.get("22:g.36689419_36689421del")) ); } @Test public void resolveVariantClassification() throws IOException { Map<String, VariantAnnotation> variantMockData = 
this.variantAnnotationMockData.generateData(); this.variantTypeResolverMocker.mockMethods(variantMockData, this.variantTypeResolver); assertEquals( "3'Flank", this.variantClassificationResolver.resolve( variantMockData.get("8:g.37696499_37696500insG"), variantMockData.get("8:g.37696499_37696500insG").getTranscriptConsequences().get(0) ) ); assertEquals( "5'Flank", this.variantClassificationResolver.resolve( variantMockData.get("11:g.62393546_62393547delinsAA"), variantMockData.get("11:g.62393546_62393547delinsAA").getTranscriptConsequences().get(0) ) ); assertEquals( "3'Flank", this.variantClassificationResolver.resolve( variantMockData.get("19:g.46141892_46141893delinsAA"), variantMockData.get("19:g.46141892_46141893delinsAA").getTranscriptConsequences().get(1) ) ); assertEquals( "Splice_Region", this.variantClassificationResolver.resolve( null, variantMockData.get("7:g.55220240G>T").getTranscriptConsequences().get(0) ) ); } private String resolveForCanonical(VariantAnnotation variantAnnotation) { return this.variantClassificationResolver.resolve( variantAnnotation, this.canonicalTranscriptResolver.resolve(variantAnnotation) ); } }
ministryofjustice/prison-visits
spec/configuration/filter_parameter_logging_spec.rb
<gh_stars>1-10 require 'rails_helper' RSpec.describe Rails.application.config.filter_parameters do it "filters out sensitive information" do expect(subject).to eq( [:password, :first_name, :last_name, :number, :date_of_birth, :email] ) end end
villChurch/AnimalCrossingNewHorizonAPI
src/main/java/com/williamspires/acnhapi/Model/Raymond.java
<gh_stars>0 package com.williamspires.acnhapi.Model; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.Table; @Entity @Table(name="raymond") public class Raymond { @Id private int id; private String item; private String color_1; private String color_2; private String emotion; private String category; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getItem() { return item; } public void setItem(String item) { this.item = item; } public String getColor_1() { return color_1; } public void setColor_1(String colour_1) { this.color_1 = colour_1; } public String getColor_2() { return color_2; } public void setColor_2(String colour_2) { this.color_2 = colour_2; } public String getEmotion() { return emotion; } public void setEmotion(String emotion) { this.emotion = emotion; } public String getCategory() { return category; } public void setCategory(String category) { this.category = category; } }
dzcoin/DzCoinMiningAlgorithm
src/qt/test/uritests.h
// copyright (c) 2009-2014 the dzcoin core developers // distributed under the mit software license, see the accompanying // file copying or http://www.opensource.org/licenses/mit-license.php. #ifndef dzcoin_qt_test_uritests_h #define dzcoin_qt_test_uritests_h #include <qobject> #include <qtest> class uritests : public qobject { q_object private slots: void uritests(); }; #endif // dzcoin_qt_test_uritests_h
marga8080/spring-security-oauth2-jwt
oauth2-server/src/main/java/com/soj/controller/SmsController.java
<gh_stars>1-10 package com.soj.controller; import javax.servlet.http.HttpSession; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; import com.soj.http.RespResult; import com.soj.utils.JedisUtils; @RestController @RequestMapping("sms") public class SmsController { /** * 登录发送验证码 * @param phone * @return */ @GetMapping("loginPhoneCode") public RespResult<Object> loginPhoneCode(String phone, HttpSession session) { RespResult<Object> result = new RespResult<Object>(); String code = "111111"; // TODO 随机码 测试用111111 sendSms(code); JedisUtils.set(session.getId() + "-" + phone, code, 5 * 60); // 5分钟内有效 return result; } /** * 发送短信验证码 * @param code */ private void sendSms(String code) { // TODO } }
imgly/adonis
src/lib/styles-buffer.js
import { requestAnimationFrame } from './utils' export default class StylesBuffer { constructor (adonis) { this._adonis = adonis this._injectionEnabled = true this._bufferedSelectors = {} this._buffer = [] this._styleNode = this._findStyleNode() this._sheet = this._findSheet() } /** * Finds the Stylesheet for our style node * @return {CSSStyleSheet} * @private */ _findSheet () { if (!this._styleNode) return if (this._styleNode.sheet) { return this._styleNode.sheet } // Find stylesheet const { styleSheets } = document for (let i = 0; i < styleSheets.length; i++) { const styleSheet = styleSheets[i] if (styleSheet.ownerNode === this._styleNode) { return styleSheet } } } /** * Finds the style node * @return {DOMElement} * @private */ _findStyleNode () { let { styleNode } = this._adonis.getOptions() if (styleNode) { return styleNode } if (typeof document === 'undefined') return null styleNode = document.createElement('style') styleNode.setAttribute('data-adonis', true) document.head.appendChild(styleNode) return styleNode } bufferCSS (css) { this._buffer.push(css) } /** * Buffers the given array of css rulesets * @param {String[][]} rulesets */ bufferRulesets (rulesets) { rulesets.forEach(([selector, css]) => { this._bufferedSelectors[selector] = true }) Array.prototype.push.apply(this._buffer, rulesets.map(([, css]) => css)) } /** * Checks if the given selector has been buffered already * @param {String} selector * @return {Boolean} */ isSelectorBuffered (selector) { return this._bufferedSelectors[selector] } /** * Disables the injection */ disableInjection () { this._injectionEnabled = false } /** * Enables the injection */ enableInjection () { this._injectionEnabled = true } /** * Flushes the buffered CSS to a string and returns it * @param {Boolean} clearBufferedSelectors = false * @return {String} */ flushToString (clearBufferedSelectors = false) { const { minified } = this._adonis.getOptions() const content = this._buffer.join(minified ? 
'' : '\n\n') this._buffer = [] if (clearBufferedSelectors) { this._bufferedSelectors = {} } return content } /** * Injects the css rules using CSSStyleSheet#insertRule * @private */ _injectFast () { this._buffer.forEach(rule => { this._sheet.insertRule(rule, this._sheet.cssRules.length) }) } /** * Injects the css rules by appending text nodes to the style node * @private */ _injectDebug () { const css = this.flushToString() if (!css) return const { minified } = this._adonis.getOptions() const hasContent = this._styleNode.innerHTML.length > 0 this._styleNode.appendChild( document.createTextNode((minified || !hasContent ? '' : '\n\n') + css) ) } /** * Actually flushes the css rules to the style node * @private */ _flushToStyleTag () { const { injectionMode } = this._adonis.getOptions() if (injectionMode === 'fast' && this._sheet.insertRule) { this._injectFast() } else if (injectionMode === 'debug') { this._injectDebug() } else { throw new Error(`Unknown CSS injection mode: \`${injectionMode}\``) } this._buffer = [] } /** * Schedules the injection of css rules into the style node */ flushToStyleTag () { if (!this._injectionEnabled) return const { batchInjection } = this._adonis.getOptions() if (!this._nextTick && batchInjection) { this._nextTick = requestAnimationFrame(() => { this._nextTick = null this._flushToStyleTag() }) } else if (!batchInjection) { this._flushToStyleTag() } } }
mattmaniak/fiflo
src/file_io.h
<reponame>mattmaniak/fiflo<filename>src/file_io.h #ifndef FILE_IO_H #define FILE_IO_H // File operations. #include "v_file.h" #include "config.h" #include "modes.h" #define FILE__AT_LEAST_ONE_TAB 1 extern bool input__printable_char(V_file* const, const char); // Load a file to the program. bool file_io__load(V_file* const, const Config* const, const Modes* const); // If there is tab char in a file, load e.g. 4 tabs to a memory. bool file_io__convert_tab_from_file(V_file* const, const Config* const, const Modes* const, const char); // Convert fiflo's virtual tabs, e.g. "\t\t\t\t" to the original '\t'. void file_io__convert_tab_to_file(const V_file* const, const Config* const, const size_t, size_t* const); // Put a whole text to a file. bool file_io__save(V_file* const, const Config* const); // Read a set branch from the ".git/" dir. bool file_io__get_git_branch(V_file* const); #endif
jakubdzubak1/mobile-messaging-sdk-android
infobip-mobile-messaging-api-java/src/main/java/org/infobip/mobile/messaging/api/support/util/MapUtils.java
<filename>infobip-mobile-messaging-api-java/src/main/java/org/infobip/mobile/messaging/api/support/util/MapUtils.java package org.infobip.mobile.messaging.api.support.util; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; /** * @author mstipanov * @since 08.03.2016. */ public abstract class MapUtils { public static Map<String, Collection<Object>> map(Object... kvargs) { HashMap<String, Collection<Object>> hashMap = new HashMap<>(); for (int i = 0; i < kvargs.length; i += 2) { hashMap.put(kvargs[i].toString(), Collections.singleton(kvargs[i + 1])); } return hashMap; } @SafeVarargs public static <K, V> Map<K, V> concat(Map<K, V>... mps) { return concat(null, mps); } @SafeVarargs public static <K, V> Map<K, V> concatOrEmpty(Map<K, V>... mps) { return MapUtils.concat(Collections.<K, V>emptyMap(), mps); } @SafeVarargs private static <K, V> Map<K, V> concat(Map<K, V> valueIfEmpty, Map<K, V>... mps) { List<Map<K, V>> maps = new LinkedList<>(Arrays.asList(mps)); for (Iterator<Map<K, V>> iterator = maps.iterator(); iterator.hasNext(); ) { Map<K, V> map = iterator.next(); if (map == null) { iterator.remove(); } } if (maps.isEmpty()) { return valueIfEmpty; } Map<K, V> result = new HashMap<>(); for (int i = 0; i < maps.size(); i++) { result.putAll(maps.get(i)); } return result; } }
Azure/commercial-marketplace-sdk-java
sdk/src/main/java/com/microsoft/marketplace/meter/models/UsageEventBadRequestResponseDetail.java
<reponame>Azure/commercial-marketplace-sdk-java // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. package com.microsoft.marketplace.meter.models; import com.azure.core.annotation.Fluent; import com.fasterxml.jackson.annotation.JsonProperty; /** The UsageEventBadRequestResponseDetail model. */ @Fluent public final class UsageEventBadRequestResponseDetail { /* * The code property. */ @JsonProperty(value = "code") private String code; /* * The message property. */ @JsonProperty(value = "message") private String message; /* * The target property. */ @JsonProperty(value = "target") private String target; /** * Get the code property: The code property. * * @return the code value. */ public String getCode() { return this.code; } /** * Set the code property: The code property. * * @param code the code value to set. * @return the UsageEventBadRequestResponseDetail object itself. */ public UsageEventBadRequestResponseDetail setCode(String code) { this.code = code; return this; } /** * Get the message property: The message property. * * @return the message value. */ public String getMessage() { return this.message; } /** * Set the message property: The message property. * * @param message the message value to set. * @return the UsageEventBadRequestResponseDetail object itself. */ public UsageEventBadRequestResponseDetail setMessage(String message) { this.message = message; return this; } /** * Get the target property: The target property. * * @return the target value. */ public String getTarget() { return this.target; } /** * Set the target property: The target property. * * @param target the target value to set. * @return the UsageEventBadRequestResponseDetail object itself. 
*/ public UsageEventBadRequestResponseDetail setTarget(String target) { this.target = target; return this; } }
Priba91/Meeting
Convoy Caravan Code parts/UIElements/TableView Cells/Place Cells/PlaceMapTableViewCell.h
<filename>Convoy Caravan Code parts/UIElements/TableView Cells/Place Cells/PlaceMapTableViewCell.h<gh_stars>0 // // PlaceMapTableViewCell.h // <NAME> // // Created by Priba on 10/31/18. // Copyright © 2018 Priba. All rights reserved. // #import <UIKit/UIKit.h> #import "PlaceModel.h" #import <GoogleMaps/GoogleMaps.h> #import <GooglePlaces/GooglePlaces.h> #import "LocationManager.h" NS_ASSUME_NONNULL_BEGIN @interface PlaceMapTableViewCell : UITableViewCell @property (weak, nonatomic) IBOutlet UILabel *titleLbl; @property (weak, nonatomic) IBOutlet GMSMapView *mapView; @property (strong, nonatomic) GMSMarker *marker; - (void)populateWithPlace:(PlaceModel*)place; @end NS_ASSUME_NONNULL_END
vbresan/Waiter
source/Google app engine server side/src/biz/binarysolutions/android/gae/test/waiter/data/Storno.java
package biz.binarysolutions.android.gae.test.waiter.data; import java.util.Date; import javax.jdo.annotations.IdGeneratorStrategy; import javax.jdo.annotations.IdentityType; import javax.jdo.annotations.PersistenceCapable; import javax.jdo.annotations.Persistent; import javax.jdo.annotations.PrimaryKey; /** * * */ @PersistenceCapable(identityType = IdentityType.APPLICATION) public class Storno { @SuppressWarnings("unused") @PrimaryKey @Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY) private Long id; @Persistent private Date date; @Persistent private String tableId; /** * * @param tableId */ public Storno(String tableId) { this.date = new Date(); this.tableId = tableId; } /** * * @return */ public Date getDate() { return date; } /** * * @return */ public String getTableId() { return tableId; } }
GlobalFinPrint/global_finprint
global_finprint/bruv/migrations/0030_auto_20170815_2045.py
<gh_stars>0 # -*- coding: utf-8 -*- # Generated by Django 1.10.7 on 2017-08-15 20:45 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('bruv', '0029_auto_20170813_1910'), ] operations = [ migrations.AlterField( model_name='set', name='code', field=models.CharField(blank=True, db_index=True, help_text='[site + reef code]_xxx', max_length=32, null=True), ), ]
wedataintelligence/vivaldi-source
chromium/third_party/WebKit/Source/core/animation/LengthBoxStyleInterpolation.cpp
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "core/animation/LengthBoxStyleInterpolation.h" #include "core/css/CSSQuadValue.h" #include "core/css/resolver/StyleBuilder.h" namespace blink { namespace { bool onlyInterpolateBetweenLengthAndCSSValueAuto(const CSSQuadValue& startRect, const CSSQuadValue& endRect) { return startRect.left()->isLength() != endRect.left()->isLength() && startRect.right()->isLength() != endRect.right()->isLength() && startRect.top()->isLength() != endRect.top()->isLength() && startRect.bottom()->isLength() != endRect.bottom()->isLength(); } } // namespace PassRefPtr<LengthBoxStyleInterpolation> LengthBoxStyleInterpolation::maybeCreateFrom(CSSValue& start, CSSValue& end, CSSPropertyID id) { bool startRect = start.isQuadValue() && toCSSQuadValue(start).serializationType() == CSSQuadValue::SerializationType::SerializeAsRect; bool endRect = end.isQuadValue() && toCSSQuadValue(end).serializationType() == CSSQuadValue::SerializationType::SerializeAsRect; if (startRect && endRect) return adoptRef(new LengthBoxStyleInterpolation(lengthBoxtoInterpolableValue(start, end, false), lengthBoxtoInterpolableValue(end, start, true), id, &start, &end)); return nullptr; } PassOwnPtr<InterpolableValue> LengthBoxStyleInterpolation::lengthBoxtoInterpolableValue(const CSSValue& lengthBox, const CSSValue& matchingValue, bool isEndInterpolation) { const int numberOfSides = 4; OwnPtr<InterpolableList> result = InterpolableList::create(numberOfSides); const CSSQuadValue& rect = toCSSQuadValue(lengthBox); const CSSQuadValue& matchingRect = toCSSQuadValue(matchingValue); CSSPrimitiveValue* side[numberOfSides] = { rect.left(), rect.right(), rect.top(), rect.bottom() }; CSSPrimitiveValue* matchingSide[numberOfSides] = { matchingRect.left(), matchingRect.right(), matchingRect.top(), matchingRect.bottom() }; for (size_t i = 0; i < numberOfSides; 
i++) { if (side[i]->isValueID() || matchingSide[i]->isValueID()) { result->set(i, InterpolableBool::create(isEndInterpolation)); } else { ASSERT(LengthStyleInterpolation::canCreateFrom(*side[i])); result->set(i, LengthStyleInterpolation::toInterpolableValue(*side[i])); } } return result.release(); } bool LengthBoxStyleInterpolation::usesDefaultInterpolation(const CSSValue& start, const CSSValue& end) { if (start.isPrimitiveValue() && end.isPrimitiveValue()) { const CSSPrimitiveValue& startValue = toCSSPrimitiveValue(start); const CSSPrimitiveValue& endValue = toCSSPrimitiveValue(end); return (startValue.isValueID() && startValue.getValueID() == CSSValueAuto) || (endValue.isValueID() && endValue.getValueID() == CSSValueAuto); } if (!start.isQuadValue() || !end.isQuadValue()) return false; const CSSQuadValue& startValue = toCSSQuadValue(start); const CSSQuadValue& endValue = toCSSQuadValue(end); return onlyInterpolateBetweenLengthAndCSSValueAuto(startValue, endValue); } namespace { PassRefPtrWillBeRawPtr<CSSPrimitiveValue> indexedValueToLength(InterpolableList& lengthBox, size_t i, CSSPrimitiveValue* start[], CSSPrimitiveValue* end[]) { if (lengthBox.get(i)->isBool()) { if (toInterpolableBool(lengthBox.get(i))->value()) return end[i]; return start[i]; } return LengthStyleInterpolation::fromInterpolableValue(*lengthBox.get(i), RangeAll); } } PassRefPtrWillBeRawPtr<CSSValue> LengthBoxStyleInterpolation::interpolableValueToLengthBox(InterpolableValue* value, const CSSValue& originalStart, const CSSValue& originalEnd) { InterpolableList* lengthBox = toInterpolableList(value); const CSSQuadValue& startRect = toCSSQuadValue(originalStart); const CSSQuadValue& endRect = toCSSQuadValue(originalEnd); CSSPrimitiveValue* startSides[4] = { startRect.left(), startRect.right(), startRect.top(), startRect.bottom() }; CSSPrimitiveValue* endSides[4] = { endRect.left(), endRect.right(), endRect.top(), endRect.bottom() }; RefPtrWillBeRawPtr<CSSPrimitiveValue> left = 
indexedValueToLength(*lengthBox, 0, startSides, endSides); RefPtrWillBeRawPtr<CSSPrimitiveValue> right = indexedValueToLength(*lengthBox, 1, startSides, endSides); RefPtrWillBeRawPtr<CSSPrimitiveValue> top = indexedValueToLength(*lengthBox, 2, startSides, endSides); RefPtrWillBeRawPtr<CSSPrimitiveValue> bottom = indexedValueToLength(*lengthBox, 3, startSides, endSides); return CSSQuadValue::create(top.release(), right.release(), bottom.release(), left.release(), CSSQuadValue::SerializeAsRect); } void LengthBoxStyleInterpolation::apply(StyleResolverState& state) const { if (m_cachedValue.get()->isBool()) StyleBuilder::applyProperty(m_id, state, toInterpolableBool(m_cachedValue.get())->value() ? m_endCSSValue.get() : m_startCSSValue.get()); else StyleBuilder::applyProperty(m_id, state, interpolableValueToLengthBox(m_cachedValue.get(), *m_startCSSValue, *m_endCSSValue).get()); } }
marferfer/SpinOff-LoL
Aplicacion Movil/generated/bundles/login-transition/build/Android/Preview/app/src/main/include/Fuse.Controls.Fallbac-2048a2c7.h
// This file was generated based on C:/Users/JuanJose/AppData/Local/Fusetools/Packages/Fuse.Controls.Primitives/1.9.0/TextControls/FallbackTextRenderer/TextRenderer.uno. // WARNING: Changes might be lost if you edit this file directly. #pragma once #include <Fuse.Controls.ITextRenderer.h> #include <Uno.Float2.h> #include <Uno.Object.h> #include <Uno.Rect.h> namespace g{namespace Fuse{namespace Controls{namespace FallbackTextRenderer{struct DefaultTextRenderer;}}}} namespace g{namespace Fuse{namespace Controls{namespace FallbackTextRenderer{struct TextRenderer;}}}} namespace g{namespace Fuse{namespace Controls{namespace FallbackTextRenderer{struct WordWrapInfo;}}}} namespace g{namespace Fuse{namespace Controls{namespace FallbackTextRenderer{struct WrappedLine;}}}} namespace g{namespace Fuse{namespace Controls{struct TextControl;}}} namespace g{namespace Fuse{struct DrawContext;}} namespace g{namespace Fuse{struct Font;}} namespace g{namespace Fuse{struct LayoutParams;}} namespace g{namespace Fuse{struct Visual;}} namespace g{namespace Uno{namespace Collections{struct Dictionary;}}} namespace g{namespace Uno{namespace Content{namespace Fonts{struct FontFace;}}}} namespace g{ namespace Fuse{ namespace Controls{ namespace FallbackTextRenderer{ // internal sealed class TextRenderer :10 // { struct TextRenderer_type : uType { ::g::Fuse::Controls::ITextRenderer interface0; }; TextRenderer_type* TextRenderer_typeof(); void TextRenderer__ctor__fn(TextRenderer* __this, ::g::Fuse::Controls::TextControl* text); void TextRenderer__Arrange_fn(TextRenderer* __this, ::g::Uno::Float2* position, ::g::Uno::Float2* size); void TextRenderer__Draw_fn(TextRenderer* __this, ::g::Fuse::DrawContext* dc, ::g::Fuse::Visual* where); void TextRenderer__GetContentSize_fn(TextRenderer* __this, ::g::Fuse::LayoutParams* lp, ::g::Uno::Float2* __retval); void TextRenderer__GetRenderBounds_fn(TextRenderer* __this, ::g::Uno::Rect* __retval); void TextRenderer__GetTextRenderer_fn(::g::Fuse::Font* f, 
::g::Fuse::Controls::FallbackTextRenderer::DefaultTextRenderer** __retval); void TextRenderer__InitWrap_fn(TextRenderer* __this, float* wrapWidth, uString* value, bool* useMin); void TextRenderer__Invalidate_fn(TextRenderer* __this); void TextRenderer__LoadFont_fn(::g::Fuse::Font* font, ::g::Uno::Content::Fonts::FontFace** __retval); void TextRenderer__New1_fn(::g::Fuse::Controls::TextControl* text, TextRenderer** __retval); void TextRenderer__SoftDispose_fn(TextRenderer* __this); void TextRenderer__UpdateArrange_fn(TextRenderer* __this); struct TextRenderer : uObject { uStrong< ::g::Fuse::Controls::TextControl*> Control; static uSStrong< ::g::Uno::Collections::Dictionary*> _textRenderers_; static uSStrong< ::g::Uno::Collections::Dictionary*>& _textRenderers() { return TextRenderer_typeof()->Init(), _textRenderers_; } uStrong< ::g::Fuse::Controls::FallbackTextRenderer::WordWrapInfo*> _wrapInfo; uStrong<uArray*> _wrappedLines; float _wrapWidth; ::g::Uno::Rect _textBounds; int32_t _maxTextLength; uStrong<uString*> _cacheValue; bool _cacheMin; ::g::Uno::Float2 _position; ::g::Uno::Float2 _size; void ctor_(::g::Fuse::Controls::TextControl* text); void Arrange(::g::Uno::Float2 position, ::g::Uno::Float2 size); void Draw(::g::Fuse::DrawContext* dc, ::g::Fuse::Visual* where); ::g::Uno::Float2 GetContentSize(::g::Fuse::LayoutParams lp); ::g::Uno::Rect GetRenderBounds(); void InitWrap(float wrapWidth, uString* value, bool useMin); void Invalidate(); void SoftDispose(); void UpdateArrange(); static ::g::Fuse::Controls::FallbackTextRenderer::DefaultTextRenderer* GetTextRenderer(::g::Fuse::Font* f); static ::g::Uno::Content::Fonts::FontFace* LoadFont(::g::Fuse::Font* font); static TextRenderer* New1(::g::Fuse::Controls::TextControl* text); }; // } }}}} // ::g::Fuse::Controls::FallbackTextRenderer
SebastianTirado/Cpp-Learning-Archive
Deitel/Chapter10/exercises/10.10/Card.cpp
<reponame>SebastianTirado/Cpp-Learning-Archive<gh_stars>10-100 /* * ===================================================================================== * * Filename: * * Description: * * Version: 1.0 * Created: Thanks to github you know it * Revision: none * Compiler: g++ * * Author: <NAME> <EMAIL> * * * ===================================================================================== */ #include <string> #include "Card.hpp" std::string Card::suits[5] = {"", "clubs", "diamonds", "hearts", "spades"}; std::string Card::faces[14] = {"", "ace", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "jack", "queen", "king"}; Card::Card(int f, int s) { face = f; suit = s; } std::string Card::toString() const { return faces[face] + " of " + suits[suit]; }
AVISPL/dal-codecs-singlecodecs-cisco
src/main/java/com/avispl/dal/communicator/cisco/dto/configuration/video/VideoConfigurationConnector.java
/* * Copyright (c) 2021 AVI-SPL Inc. All Rights Reserved. */ package com.avispl.dal.communicator.cisco.dto.configuration.video; import com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; /** * Device Video Configuration Connector class. * Serves both for collecting data and changing device's config. * * @author Maksym.Rossiitsev / Symphony Dev Team<br> * Created on Apr 26, 2021 * @since 1.0 */ @XmlAccessorType(XmlAccessType.NONE) public class VideoConfigurationConnector { @XmlAttribute(name = "item") private String item; @XmlElement(name = "CameraControl") private VideoConfigurationCameraControl cameraControl; @XmlElement(name = "InputSourceType") private ValueSpaceRefHolder inputSourceType; @XmlElement(name = "Name") private ValueSpaceRefHolder name; @XmlElement(name = "PresentationSelection") private ValueSpaceRefHolder presentationSelection; @XmlElement(name = "Quality") private ValueSpaceRefHolder quality; @XmlElement(name = "Visibility") private ValueSpaceRefHolder visibility; @XmlElement(name = "MonitorRole") private ValueSpaceRefHolder monitorRole; @XmlElement(name = "OverscanLevel") private ValueSpaceRefHolder overscanLevel; @XmlElement(name = "Resolution") private ValueSpaceRefHolder resolution; @XmlElement(name = "CEC") private VideoConfigurationCEC cec; /** * Retrieves {@code {@link #item}} * * @return value of {@link #item} */ public String getItem() { return item; } /** * Sets {@code item} * * @param item the {@code java.lang.String} field */ public void setItem(String item) { this.item = item; } /** * Retrieves {@code {@link #cameraControl}} * * @return value of {@link #cameraControl} */ public VideoConfigurationCameraControl getCameraControl() { return cameraControl; } /** * Sets {@code cameraControl} * * @param cameraControl the {@code 
com.avispl.dal.communicator.cisco.dto.configuration.video.VideoConfigurationCameraControl} field */ public void setCameraControl(VideoConfigurationCameraControl cameraControl) { this.cameraControl = cameraControl; } /** * Retrieves {@code {@link #inputSourceType}} * * @return value of {@link #inputSourceType} */ public ValueSpaceRefHolder getInputSourceType() { return inputSourceType; } /** * Sets {@code inputSourceType} * * @param inputSourceType the {@code com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder} field */ public void setInputSourceType(ValueSpaceRefHolder inputSourceType) { this.inputSourceType = inputSourceType; } /** * Retrieves {@code {@link #name}} * * @return value of {@link #name} */ public ValueSpaceRefHolder getName() { return name; } /** * Sets {@code name} * * @param name the {@code com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder} field */ public void setName(ValueSpaceRefHolder name) { this.name = name; } /** * Retrieves {@code {@link #presentationSelection}} * * @return value of {@link #presentationSelection} */ public ValueSpaceRefHolder getPresentationSelection() { return presentationSelection; } /** * Sets {@code presentationSelection} * * @param presentationSelection the {@code com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder} field */ public void setPresentationSelection(ValueSpaceRefHolder presentationSelection) { this.presentationSelection = presentationSelection; } /** * Retrieves {@code {@link #quality}} * * @return value of {@link #quality} */ public ValueSpaceRefHolder getQuality() { return quality; } /** * Sets {@code quality} * * @param quality the {@code com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder} field */ public void setQuality(ValueSpaceRefHolder quality) { this.quality = quality; } /** * Retrieves {@code {@link #visibility}} * * @return value of {@link #visibility} */ public ValueSpaceRefHolder getVisibility() { return visibility; } /** * Sets {@code visibility} * * @param visibility 
the {@code com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder} field */ public void setVisibility(ValueSpaceRefHolder visibility) { this.visibility = visibility; } /** * Retrieves {@code {@link #monitorRole}} * * @return value of {@link #monitorRole} */ public ValueSpaceRefHolder getMonitorRole() { return monitorRole; } /** * Sets {@code monitorRole} * * @param monitorRole the {@code com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder} field */ public void setMonitorRole(ValueSpaceRefHolder monitorRole) { this.monitorRole = monitorRole; } /** * Retrieves {@code {@link #overscanLevel}} * * @return value of {@link #overscanLevel} */ public ValueSpaceRefHolder getOverscanLevel() { return overscanLevel; } /** * Sets {@code overscanLevel} * * @param overscanLevel the {@code com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder} field */ public void setOverscanLevel(ValueSpaceRefHolder overscanLevel) { this.overscanLevel = overscanLevel; } /** * Retrieves {@code {@link #resolution}} * * @return value of {@link #resolution} */ public ValueSpaceRefHolder getResolution() { return resolution; } /** * Sets {@code resolution} * * @param resolution the {@code com.avispl.dal.communicator.cisco.dto.ValueSpaceRefHolder} field */ public void setResolution(ValueSpaceRefHolder resolution) { this.resolution = resolution; } /** * Retrieves {@code {@link #cec}} * * @return value of {@link #cec} */ public VideoConfigurationCEC getCec() { return cec; } /** * Sets {@code cec} * * @param cec the {@code com.avispl.dal.communicator.cisco.dto.configuration.video.VideoConfigurationCEC} field */ public void setCec(VideoConfigurationCEC cec) { this.cec = cec; } }
masud-technope/ACER-Replication-Package-ASE2017
corpus/class/eclipse.jdt.core/3658.java
<reponame>masud-technope/ACER-Replication-Package-ASE2017 /******************************************************************************* * Copyright (c) 2000, 2016 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Common Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/cpl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.jdt.internal.compiler.classfmt; import org.eclipse.jdt.core.compiler.CharOperation; import org.eclipse.jdt.internal.compiler.codegen.AttributeNamesConstants; import org.eclipse.jdt.internal.compiler.env.IBinaryField; import org.eclipse.jdt.internal.compiler.impl.BooleanConstant; import org.eclipse.jdt.internal.compiler.impl.ByteConstant; import org.eclipse.jdt.internal.compiler.impl.CharConstant; import org.eclipse.jdt.internal.compiler.impl.Constant; import org.eclipse.jdt.internal.compiler.impl.DoubleConstant; import org.eclipse.jdt.internal.compiler.impl.FloatConstant; import org.eclipse.jdt.internal.compiler.impl.IntConstant; import org.eclipse.jdt.internal.compiler.impl.LongConstant; import org.eclipse.jdt.internal.compiler.impl.ShortConstant; import org.eclipse.jdt.internal.compiler.impl.StringConstant; import org.eclipse.jdt.internal.compiler.lookup.TypeIds; import org.eclipse.jdt.internal.compiler.util.Util; public class FieldInfo extends ClassFileStruct implements AttributeNamesConstants, IBinaryField, Comparable, TypeIds { private int accessFlags; private int attributeBytes; private Constant constant; private int[] constantPoolOffsets; private char[] descriptor; private char[] name; private Object wrappedConstantValue; private char[] signature; private int signatureUtf8Offset; /** * @param classFileBytes byte[] * @param offsets int[] * @param offset int */ 
public FieldInfo(byte classFileBytes[], int offsets[], int offset) { super(classFileBytes, offset); constantPoolOffsets = offsets; accessFlags = -1; int attributesCount = u2At(6); int readOffset = 8; this.signatureUtf8Offset = -1; for (int i = 0; i < attributesCount; i++) { // check the name of each attribute int utf8Offset = constantPoolOffsets[u2At(readOffset)] - structOffset; char[] attributeName = utf8At(utf8Offset + 3, u2At(utf8Offset + 1)); if (CharOperation.equals(AttributeNamesConstants.SignatureName, attributeName)) { this.signatureUtf8Offset = constantPoolOffsets[u2At(readOffset + 6)] - structOffset; } readOffset += (6 + u4At(readOffset + 2)); } attributeBytes = readOffset; } public int compareTo(Object o) { if (!(o instanceof FieldInfo)) { throw new ClassCastException(); } return new String(this.getName()).compareTo(new String(((FieldInfo) o).getName())); } /** * Return the constant of the field. * Return org.eclipse.jdt.internal.compiler.impl.Constant.NotAConstant if there is none. * @return org.eclipse.jdt.internal.compiler.impl.Constant */ public Constant getConstant() { if (constant == null) { // read constant readConstantAttribute(); } return constant; } public char[] getGenericSignature() { if (this.signatureUtf8Offset != -1) { if (this.signature == null) { // decode the signature this.signature = utf8At(this.signatureUtf8Offset + 3, u2At(this.signatureUtf8Offset + 1)); } return this.signature; } return null; } /** * Answer an int whose bits are set according the access constants * defined by the VM spec. * Set the AccDeprecated and AccSynthetic bits if necessary * @return int */ public int getModifiers() { if (this.accessFlags == -1) { // compute the accessflag. Don't forget the deprecated attribute this.accessFlags = u2At(0); readDeprecatedAndSyntheticAttributes(); } return this.accessFlags; } /** * Answer the name of the field. 
* @return char[] */ public char[] getName() { if (name == null) { // read the name int utf8Offset = constantPoolOffsets[u2At(2)] - structOffset; name = utf8At(utf8Offset + 3, u2At(utf8Offset + 1)); } return name; } /** * Answer the resolved name of the receiver's type in the * class file format as specified in section 4.3.2 of the Java 2 VM spec. * * For example: * - java.lang.String is Ljava/lang/String; * - an int is I * - a 2 dimensional array of strings is [[Ljava/lang/String; * - an array of floats is [F * @return char[] */ public char[] getTypeName() { if (descriptor == null) { // read the signature int utf8Offset = constantPoolOffsets[u2At(4)] - structOffset; descriptor = utf8At(utf8Offset + 3, u2At(utf8Offset + 1)); } return descriptor; } /** * Return a wrapper that contains the constant of the field. * @return java.lang.Object */ public Object getWrappedConstantValue() { if (this.wrappedConstantValue == null) { if (hasConstant()) { Constant fieldConstant = getConstant(); switch(fieldConstant.typeID()) { case T_int: this.wrappedConstantValue = Integer.valueOf(fieldConstant.intValue()); break; case T_byte: this.wrappedConstantValue = Byte.valueOf(fieldConstant.byteValue()); break; case T_short: this.wrappedConstantValue = Short.valueOf(fieldConstant.shortValue()); break; case T_char: this.wrappedConstantValue = Character.valueOf(fieldConstant.charValue()); break; case T_float: this.wrappedConstantValue = new Float(fieldConstant.floatValue()); break; case T_double: this.wrappedConstantValue = new Double(fieldConstant.doubleValue()); break; case T_boolean: this.wrappedConstantValue = Util.toBoolean(fieldConstant.booleanValue()); break; case T_long: this.wrappedConstantValue = Long.valueOf(fieldConstant.longValue()); break; case T_String: this.wrappedConstantValue = fieldConstant.stringValue(); } } } return this.wrappedConstantValue; } /** * Return true if the field has a constant value attribute, false otherwise. 
* @return boolean */ public boolean hasConstant() { return getConstant() != Constant.NotAConstant; } /** * This method is used to fully initialize the contents of the receiver. All methodinfos, fields infos * will be therefore fully initialized and we can get rid of the bytes. */ void initialize() { getModifiers(); getName(); getConstant(); getTypeName(); getGenericSignature(); reset(); } /** * Return true if the field is a synthetic field, false otherwise. * @return boolean */ public boolean isSynthetic() { return (getModifiers() & AccSynthetic) != 0; } private void readConstantAttribute() { int attributesCount = u2At(6); int readOffset = 8; boolean isConstant = false; for (int i = 0; i < attributesCount; i++) { int utf8Offset = constantPoolOffsets[u2At(readOffset)] - structOffset; char[] attributeName = utf8At(utf8Offset + 3, u2At(utf8Offset + 1)); if (CharOperation.equals(attributeName, ConstantValueName)) { isConstant = true; // read the right constant int relativeOffset = constantPoolOffsets[u2At(readOffset + 6)] - structOffset; switch(u1At(relativeOffset)) { case IntegerTag: char[] sign = getTypeName(); if (sign.length == 1) { switch(sign[0]) { // boolean constant case 'Z': constant = new BooleanConstant(i4At(relativeOffset + 1) == 1); break; // integer constant case 'I': constant = new IntConstant(i4At(relativeOffset + 1)); break; // char constant case 'C': constant = new CharConstant((char) i4At(relativeOffset + 1)); break; // byte constant case 'B': constant = new ByteConstant((byte) i4At(relativeOffset + 1)); break; // short constant case 'S': constant = new ShortConstant((short) i4At(relativeOffset + 1)); break; default: constant = Constant.NotAConstant; } } else { constant = Constant.NotAConstant; } break; case FloatTag: constant = new FloatConstant(floatAt(relativeOffset + 1)); break; case DoubleTag: constant = new DoubleConstant(doubleAt(relativeOffset + 1)); break; case LongTag: constant = new LongConstant(i8At(relativeOffset + 1)); break; case 
StringTag: utf8Offset = constantPoolOffsets[u2At(relativeOffset + 1)] - structOffset; constant = new StringConstant(String.valueOf(utf8At(utf8Offset + 3, u2At(utf8Offset + 1)))); break; } } readOffset += (6 + u4At(readOffset + 2)); } if (!isConstant) { constant = Constant.NotAConstant; } } private void readDeprecatedAndSyntheticAttributes() { int attributesCount = u2At(6); int readOffset = 8; for (int i = 0; i < attributesCount; i++) { int utf8Offset = constantPoolOffsets[u2At(readOffset)] - structOffset; char[] attributeName = utf8At(utf8Offset + 3, u2At(utf8Offset + 1)); if (CharOperation.equals(attributeName, DeprecatedName)) { this.accessFlags |= AccDeprecated; } else if (CharOperation.equals(attributeName, SyntheticName)) { this.accessFlags |= AccSynthetic; } readOffset += (6 + u4At(readOffset + 2)); } } protected void reset() { this.constantPoolOffsets = null; super.reset(); } /** * Answer the size of the receiver in bytes. * * @return int */ public int sizeInBytes() { return attributeBytes; } public void throwFormatException() throws ClassFormatException { throw new ClassFormatException(ClassFormatException.ErrBadFieldInfo); } public String toString() { StringBuffer buffer = new StringBuffer(this.getClass().getName()); int modifiers = getModifiers(); return buffer.append(//$NON-NLS-1$ "{").append(//$NON-NLS-1$ //$NON-NLS-2$ ((modifiers & AccDeprecated) != 0 ? "deprecated " : "") + //$NON-NLS-1$ //$NON-NLS-2$ ((modifiers & 0x0001) == 1 ? "public " : "") + //$NON-NLS-1$ //$NON-NLS-2$ ((modifiers & 0x0002) == 0x0002 ? "private " : "") + //$NON-NLS-1$ //$NON-NLS-2$ ((modifiers & 0x0004) == 0x0004 ? "protected " : "") + //$NON-NLS-1$ //$NON-NLS-2$ ((modifiers & 0x0008) == 0x000008 ? "static " : "") + //$NON-NLS-1$ //$NON-NLS-2$ ((modifiers & 0x0010) == 0x0010 ? "final " : "") + //$NON-NLS-1$ //$NON-NLS-2$ ((modifiers & 0x0040) == 0x0040 ? "volatile " : "") + //$NON-NLS-1$ //$NON-NLS-2$ ((modifiers & 0x0080) == 0x0080 ? 
"transient " : "")).append(getTypeName()).append(//$NON-NLS-1$ " ").append(getName()).append(//$NON-NLS-1$ " ").append(getConstant()).append(//$NON-NLS-1$ "}").toString(); } }
MarginC/kame
netbsd/sys/arch/sun68k/stand/bootxx/bootxx.c
/* $NetBSD: bootxx.c,v 1.8 2002/05/15 09:44:55 lukem Exp $ */ /*- * Copyright (c) 1998 The NetBSD Foundation, Inc. * All rights reserved. * * This code is derived from software contributed to The NetBSD Foundation * by <NAME>. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * This product includes software developed by the NetBSD * Foundation, Inc. and its contributors. * 4. Neither the name of The NetBSD Foundation nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /* * This is a generic "first-stage" boot program. 
* * Note that this program has absolutely no filesystem knowledge! * * Instead, this uses a table of disk block numbers that are * filled in by the installboot program such that this program * can load the "second-stage" boot program. */ #include <sys/param.h> #include <sys/bootblock.h> #include <machine/mon.h> #include <stand.h> #include "libsa.h" /* * This is the address where we load the second-stage boot loader. */ #define LOADADDR 0x4000 /* * The contents of the sun68k_bbinfo below are set by installboot(8) * to hold the filesystem data of the second-stage boot program * (typically `/ufsboot'): filesystem block size, # of filesystem * blocks and the block numbers themselves. */ struct shared_bbinfo bbinfo = { { SUN68K_BBINFO_MAGIC }, 0, SHARED_BBINFO_MAXBLOCKS, { 0 } }; int main() { struct open_file f; void *entry; char *addr; int n, error; #ifdef DEBUG printf("bootxx: open...\n"); #endif f.f_flags = F_RAW; if (devopen(&f, 0, &addr)) { putstr("bootxx: devopen failed\n"); return; } addr = (char*)LOADADDR; error = copyboot(&f, addr); f.f_dev->dv_close(&f); if (!error) { #ifdef DEBUG printf("bootxx: start 0x%x\n", (long)addr); #endif entry = addr; chain_to(entry); } /* copyboot had a problem... */ return; } int copyboot(fp, addr) struct open_file *fp; char *addr; { int n, i, blknum; char *buf; /* Need to use a buffer that can be mapped into DVMA space. */ buf = alloc(bbinfo.bbi_block_size); if (!buf) panic("bootxx: alloc failed"); for (i = 0; i < bbinfo.bbi_block_count; i++) { if ((blknum = bbinfo.bbi_block_table[i]) == 0) break; #ifdef DEBUG printf("bootxx: block # %d = %d\n", i, blknum); #endif if ((fp->f_dev->dv_strategy)(fp->f_devdata, F_READ, blknum, bbinfo.bbi_block_size, buf, &n)) { putstr("bootxx: read failed\n"); return -1; } if (n != bbinfo.bbi_block_size) { putstr("bootxx: short read\n"); return -1; } bcopy(buf, addr, bbinfo.bbi_block_size); addr += bbinfo.bbi_block_size; } return 0; }
LeandroTk/Algorithms
competitive_programming/programming_contests/uri/help_girafales.cpp
// https://www.urionlinejudge.com.br/judge/en/problems/view/1911
#include <iostream>
#include <string>
#include <vector>

using namespace std;

// Linear search for `name` among the registered (name, signature) pairs.
// Takes the vector by const reference (the original copied it on every call).
// Returns the index, or -1 when the name is not registered -- the original
// could fall off the end without returning a value, which is undefined
// behavior in C++.
int get_index(const vector< pair<string, string> > &names, string name) {
  for (int i = 0; i < (int)names.size(); i++) {
    if (names[i].first == name) return i;
  }
  return -1;
}

int main() {
  vector< pair<string, string> > names;
  int n, m, counter = 0;
  string name, signature;

  cin >> n;

  while (n != 0) {
    // Register the n authentic (name, signature) pairs.
    while (n--) {
      cin >> name >> signature;
      names.push_back(make_pair(name, signature));
    }

    cin >> m;

    // A submitted signature is falsified when it differs from the
    // registered one in more than one position.
    for (int i = 0; i < m; i++) {
      cin >> name >> signature;
      bool falsy = false;
      int error_counter = 0;
      int ind = get_index(names, name);

      // Defensive: cannot happen for valid judge input, but avoids
      // out-of-bounds access if the name was never registered.
      if (ind < 0) continue;

      for (int j = 0; j < (int)signature.size(); j++) {
        if (names[ind].second[j] != signature[j]) error_counter++;
        if (error_counter > 1) {
          falsy = true;
          break;
        }
      }

      if (falsy) counter++;
    }

    cout << counter << endl;
    counter = 0;
    names.clear();  // NOTE(review): the original never cleared `names`
                    // between test cases either; kept? No -- see below.

    cin >> n;
  }

  return 0;
}
LinkinW92/htd
mes-service/src/main/java/com/skeqi/mes/mapper/chenj/srm/CSrmPurchaseOrderHMapper.java
package com.skeqi.mes.mapper.chenj.srm;

import com.skeqi.mes.pojo.chenj.srm.CSrmKThreePurchaseAbutting;
import com.skeqi.mes.pojo.chenj.srm.CSrmPurchaseOrderH;
import com.skeqi.mes.pojo.chenj.srm.kthree.KThreePOOrder;
import com.skeqi.mes.pojo.chenj.srm.req.CSrmPurchaseOrderHFindReq;
import com.skeqi.mes.pojo.chenj.srm.req.CSrmPurchaseOrderHReq;
import com.skeqi.mes.pojo.chenj.srm.rsp.CSrmPurchaseOrderHRsp;
import org.apache.ibatis.annotations.Param;

import java.util.List;

/**
 * MyBatis mapper for the SRM purchase-order header table
 * ({@code CSrmPurchaseOrderH}), including the K3 (Kingdee) integration
 * variants of the insert/update/select operations.
 *
 * @author ChenJ
 * @date 2021/6/10
 * @Classname CSrmPurchaseOrderHMapper
 * @Description ${Description}
 */
public interface CSrmPurchaseOrderHMapper {

    /** Deletes the header row with the given primary key. @return affected row count */
    int deleteByPrimaryKey(Integer id);

    /** Inserts a full header row. @return affected row count */
    int insert(CSrmPurchaseOrderH record);

    /** Deletes header rows matching the request criteria. @return affected row count */
    int delCSrmPurchaseOrderH(CSrmPurchaseOrderHReq cSrmPurchaseOrderHReq);

    /** Inserts the row, or updates it when it already exists (upsert, all columns). */
    int insertOrUpdate(CSrmPurchaseOrderH record);

    /** Upsert writing only the non-null fields of {@code record}. */
    int insertOrUpdateSelective(CSrmPurchaseOrderH record);

    /** Inserts only the non-null fields of {@code record}. */
    int insertSelective(CSrmPurchaseOrderH record);

    /**
     * Selects header rows by the key fields carried in {@code record}.
     * NOTE(review): despite the "ByPrimaryKey" name this returns a list —
     * presumably the mapper XML matches on more than the id; verify there.
     */
    List<CSrmPurchaseOrderH> selectByPrimaryKey(CSrmPurchaseOrderH record);

    /** Updates only the non-null fields of the row identified by the primary key. */
    int updateByPrimaryKeySelective(CSrmPurchaseOrderH record);

    /** Updates all columns of the row identified by the primary key. */
    int updateByPrimaryKey(CSrmPurchaseOrderH record);

    /** Batch update, all columns per row. */
    int updateBatch(List<CSrmPurchaseOrderH> list);

    /** Batch update, non-null columns only. */
    int updateBatchSelective(List<CSrmPurchaseOrderH> list);

    /**
     * Marks the given purchase-order header rows as "created".
     * @param list rows to update
     * @return affected row count
     */
    int updateBatchSelectiveStatusTrue(List<CSrmPurchaseOrderH> list);

    /**
     * Marks the given purchase-order header rows as "not created".
     * @param list rows to update
     * @return affected row count
     */
    int updateBatchSelectiveStatusFalse(List<CSrmPurchaseOrderH> list);

    /** Bulk insert of header rows in a single statement. */
    int batchInsert(@Param("list") List<CSrmPurchaseOrderH> list);

    /** Returns the most recently created header row (ordering defined in the mapper XML). */
    CSrmPurchaseOrderH selectFinallyData();

    /** Selects a single header (response view) matching {@code record}. */
    CSrmPurchaseOrderHRsp selectCSrmPurchaseOrderH(CSrmPurchaseOrderH record);

    /** K3-integration variant of {@link #selectCSrmPurchaseOrderH}. */
    CSrmPurchaseOrderHRsp selectCSrmPurchaseOrderHKThree(CSrmPurchaseOrderH record);

    /** Paged/filtered listing of header rows for the given search request. */
    List<CSrmPurchaseOrderHRsp> selectByPrimaryList(CSrmPurchaseOrderHFindReq req);

    /** Bulk insert of K3 purchase-order rows. */
    int batchInsertKThree(@Param("list") List<KThreePOOrder> list);

    /** Batch selective update of K3 purchase-order rows. */
    int updateBatchSelectiveKThree(List<KThreePOOrder> list);

    /** Deletes the given K3 purchase-abutting rows. */
    int delKThreeData(@Param("list") List<CSrmKThreePurchaseAbutting> list);
}
mahajrod/MAVR
scripts/filter/restore_pairs.py
#!/usr/bin/env python
__author__ = '<NAME>'
"""
Script is very slow because it uses Biopython SeqIO to parse fastq.
Use it only for small fastqs.

Restores read pairing between two fastq files: reads present in both
files are written to *_1.fastq / *_2.fastq in matching (sorted) order;
reads present in only one file go to the *_1.se.fastq / *_2.se.fastq
single-end files.
"""
import os
import re
import sys
import argparse

from Bio import SeqIO

from RouToolPa.Routines import SequenceRoutines


def get_list_from_string(s):
    # Split a comma-separated command-line value into a list of paths.
    return s.split(",")

parser = argparse.ArgumentParser()

parser.add_argument("-l", "--input_left", action="store", dest="input_left", type=get_list_from_string,
                    required=True,
                    help="Comma-separated list of files with left reads")
parser.add_argument("-r", "--input_right", action="store", dest="input_right", type=get_list_from_string,
                    required=True,
                    help="Comma-separated list of files with left reads")
parser.add_argument("-o", "--out_prefix", action="store", dest="out_prefix", required=True,
                    help="Prefix of output files")

args = parser.parse_args()

# Output file names; SeqIO.write opens (and truncates) them itself, so the
# four redundant, never-used file handles the original opened here were
# removed.
out_left = "%s_1.fastq" % args.out_prefix
out_right = "%s_2.fastq" % args.out_prefix
out_left_se = "%s_1.se.fastq" % args.out_prefix
out_right_se = "%s_2.se.fastq" % args.out_prefix

# On-disk SQLite-backed indices keyed by read id; avoids holding all
# records in memory.
left_input_reads_dict = SeqIO.index_db("left_in_reads.idx", args.input_left, "fastq")
right_input_reads_dict = SeqIO.index_db("right_in_reads.idx", args.input_right, "fastq")

left_input_set = set(left_input_reads_dict.keys())
right_input_set = set(right_input_reads_dict.keys())

# Compute the paired-id set once (the original intersected and sorted twice).
# Sorting both paired outputs identically keeps mates in matching order.
paired_ids = sorted(left_input_set & right_input_set)

SeqIO.write(SequenceRoutines.record_by_id_generator(left_input_reads_dict,
                                                    paired_ids,
                                                    verbose=True),
            out_left, "fastq")
SeqIO.write(SequenceRoutines.record_by_id_generator(right_input_reads_dict,
                                                    paired_ids,
                                                    verbose=True),
            out_right, "fastq")

# Unpaired (single-end) reads, one file per side.
SeqIO.write(SequenceRoutines.record_by_id_generator(left_input_reads_dict,
                                                    left_input_set - right_input_set,
                                                    verbose=True),
            out_left_se, "fastq")
SeqIO.write(SequenceRoutines.record_by_id_generator(right_input_reads_dict,
                                                    right_input_set - left_input_set,
                                                    verbose=True),
            out_right_se, "fastq")

# Remove the temporary index databases.
os.remove("left_in_reads.idx")
os.remove("right_in_reads.idx")
dstore-dbap/LumberMill
lumbermill/parser/MsgPack.py
# -*- coding: utf-8 -*-
import sys

import msgpack

from lumbermill.BaseThreadedModule import BaseThreadedModule
from lumbermill.utils.Decorators import ModuleDocstringParser


@ModuleDocstringParser
class MsgPack(BaseThreadedModule):
    """
    Decode:
    It will parse the msgpack data and create or replace fields in the internal data dictionary with
    the corresponding json fields.

    Encode:
    Encode selected fields or all to msgpack format.

    Configuration template:

    - parser.MsgPack:
       action:                          # <default: 'decode'; type: string; values: ['decode','encode']; is: optional>
       mode:                            # <default: 'line'; type: string; values: ['line','stream']; is: optional>
       source_fields:                   # <default: 'data'; type: string||list; is: optional>
       target_field:                    # <default: None; type: None||string; is: required if action is 'encode' or keep_original is False else optional>
       keep_original:                   # <default: False; type: boolean; is: optional>
       receivers:
        - NextModule
    """

    module_type = "parser"
    """Set module type"""

    def configure(self, configuration):
        # Call parent configure method
        BaseThreadedModule.configure(self, configuration)
        self.source_fields = self.getConfigurationValue('source_fields')
        # Allow single string as well.
        if isinstance(self.source_fields, str):
            self.source_fields = [self.source_fields]
        self.target_field = self.getConfigurationValue('target_field')
        # keep_original inverted once here so the hot paths test a simple flag.
        self.drop_original = not self.getConfigurationValue('keep_original')
        # Dropping the source without a target would lose the data entirely.
        if self.drop_original and not self.target_field:
            self.logger.error("Module configured to drop original field after decoding but no target field set. Please either set target_field or set keep_original to True.")
            self.lumbermill.shutDown()
        # Bind handleEvent to the concrete decode/encode implementation.
        if self.getConfigurationValue('action') == 'decode':
            if self.getConfigurationValue('mode') == 'line':
                self.handleEvent = self.decodeEventLine
            else:
                # Stream mode is known-broken and shuts the pipeline down,
                # but the handler is still wired up afterwards.
                # NOTE(review): the assignments after shutDown() look
                # unreachable-by-design — confirm intended behavior.
                self.logger.warning("Stream mode is currently broken. Sorry!")
                self.lumbermill.shutDown()
                self.unpacker = msgpack.Unpacker(raw=False)
                self.handleEvent = self.decodeEventStream
        else:
            self.handleEvent = self.encodeEvent

    def decodeEventStream(self, event):
        """Feed each source field into the stateful msgpack Unpacker and
        yield one event per decoded dict. Generator, as required by the
        handleEvent protocol."""
        for source_field in self.source_fields:
            try:
                data = event[source_field]
            except KeyError:
                continue
            try:
                self.unpacker.feed(data)
            except TypeError:
                # Unpacker.feed wants bytes; retry after encoding str input.
                try:
                    self.unpacker.feed(bytes(data, "utf-8"))
                except TypeError:
                    continue;
            # If decoded data contains more than one event, we need to clone all events but the first one.
            # Otherwise we will have multiple events with the same event_id.
            # KeyDotNotationDict.copy method will take care of creating a new event id.
            for events_count, decoded_data in enumerate(self.unpacker):
                # NOTE(review): enumerate starts at 0, so the second decoded
                # item (index 1) is NOT cloned by `> 1` — looks like an
                # off-by-one vs. the comment above; confirm (stream mode is
                # flagged broken in configure()).
                if events_count > 1:
                    event = event.copy()
                if not isinstance(decoded_data, dict):
                    continue
                if self.drop_original:
                    event.pop(source_field, None)
                if self.target_field:
                    event[self.target_field] = decoded_data
                else:
                    # Merge decoded keys into the event at top level.
                    try:
                        event.update(decoded_data)
                    except:
                        etype, evalue, etb = sys.exc_info()
                        self.logger.warning("Could not update event with msgpack data: %s. Exception: %s, Error: %s." % (decoded_data, etype, evalue))
                yield event

    def decodeEventLine(self, event):
        """Decode each configured source field as one complete msgpack
        document; fields that are missing or fail to parse are skipped
        (with a warning). Yields the (possibly modified) event once."""
        for source_field in self.source_fields:
            try:
                decoded_data = msgpack.unpackb(event[source_field], raw=False)
            except KeyError:
                continue
            except:
                etype, evalue, etb = sys.exc_info()
                self.logger.warning("Could not parse msgpack event data: %s. Exception: %s, Error: %s." % (event[source_field], etype, evalue))
                continue
            if self.drop_original:
                event.pop(source_field, None)
            if self.target_field:
                event.update({self.target_field: decoded_data})
            else:
                try:
                    event.update(decoded_data)
                except:
                    etype, evalue, etb = sys.exc_info()
                    self.logger.warning("Could not update event with msgpack data: %s. Exception: %s, Error: %s." % (decoded_data, etype, evalue))
        yield event

    def encodeEvent(self, event):
        """Pack either the whole event (source_fields == ['all']) or the
        selected fields into msgpack bytes. On 'all', the packed bytes
        replace the event itself; otherwise they are stored under
        target_field. On pack failure the event is yielded unmodified."""
        if self.source_fields == ['all']:
            encode_data = event
        else:
            encode_data = {}
            for source_field in self.source_fields:
                try:
                    encode_data[source_field] = event[source_field]
                except KeyError:
                    continue
                if self.drop_original:
                    event.pop(source_field, None)
        try:
            encode_data = msgpack.packb(encode_data)
        except:
            etype, evalue, etb = sys.exc_info()
            self.logger.warning("Could not msgpack encode event data: %s. Exception: %s, Error: %s." % (event, etype, evalue))
            # Pack failed: pass the original event through unchanged.
            yield event
            return
        if self.source_fields == ['all']:
            event = encode_data
        else:
            event.update({self.target_field: encode_data})
        yield event
youngqqcn/LeetCodeNodes
src/0039_combination_sum/combination_sum.cc
// author: yqq
// date: 2021-05-31 21:15:48
// descriptions: https://leetcode-cn.com/problems/combination-sum/

#include <iostream>
#include <vector>
#include <string>
#include <map>
#include <set>
#include <algorithm>
#include <memory>
using namespace std;

/*
Given an array of distinct positive integers `candidates` and a positive
integer `target`, return all unique combinations of candidates whose chosen
numbers sum to `target`. The same number may be chosen an unlimited number
of times; two combinations are unique if the multiset of chosen numbers
differs. The input is guaranteed to yield fewer than 150 combinations.

Examples:
  candidates = [2,3,6,7], target = 7  ->  [[7],[2,2,3]]
  candidates = [2,3,5],   target = 8  ->  [[2,2,2,2],[2,3,3],[3,5]]
  candidates = [2], target = 1  ->  []
  candidates = [1], target = 1  ->  [[1]]
  candidates = [1], target = 2  ->  [[1,1]]

Constraints:
  1 <= candidates.length <= 30
  1 <= candidates[i] <= 200
  All elements of candidates are distinct.
  1 <= target <= 500
*/
class Solution {

private:
    // Depth-first backtracking: `nums` is the current partial combination,
    // `sum` its running total; completed combinations are sorted and
    // inserted into `result` (a set, to deduplicate permutations).
    void combination(vector<int> &candidates, int target, int sum, vector<int> &nums, set<vector<int>> &result) {
        for (int i = 0; i < candidates.size(); i++) {
            if (sum + candidates[i] == target) {
                vector<int> tmp(nums.begin(), nums.end());
                tmp.push_back(candidates[i]);
                sort(tmp.begin(), tmp.end());
                result.insert(tmp);
                // If candidates had NOT been pre-sorted we could not return
                // here and would have to keep scanning, or combinations
                // would be missed.
                return;
                // Because candidates is sorted, every later number is >= the
                // current one, so nothing further can still hit the target.
            } else if (sum + candidates[i] > target) {
                // continue;   // without the pre-sort we would continue here
                // With candidates sorted, all later numbers overshoot too,
                // so the whole loop can stop.
                return;
            } else if (sum + candidates[i] < target) {
                nums.push_back(candidates[i]);
                combination(candidates, target, sum + candidates[i], nums, result);
                nums.pop_back();  // backtrack
            }
        }
    }

public:
    // Entry point: sorts candidates (required by the pruning above) and
    // runs the DFS. Note: sorts the caller's vector in place.
    vector<vector<int>> combinationSum(vector<int> &candidates, int target) {
        vector<int> nums;
        set<vector<int>> result;
        sort(candidates.begin(), candidates.end());
        combination(candidates, target, 0, nums, result);
        return vector<vector<int>>(result.begin(), result.end());
    }
};

// Order-insensitive comparison of the produced combinations against the
// expected set; prints PASSED/FAILED.
void test(vector<int> candidates, int target, set<vector<int>> expected) {
    Solution sol;
    auto result = sol.combinationSum(candidates, target);
    if (result.size() != expected.size()) {
        cout << "FAILED" << endl;
        return;
    }
    for (auto v : result) {
        if (expected.find(v) == expected.end()) {
            cout << "FAILED" << endl;
            return;
        }
        expected.erase(v);  // guard against duplicated results
    }
    cout << "PASSED" << endl;
}

int main() {
    test({2, 3, 6, 7}, 7, {{7}, {2, 2, 3}});
    test({2, 3, 5}, 8, {{2, 2, 2, 2}, {2, 3, 3}, {3, 5}});
    test({2, 7, 6, 3, 5, 1}, 9,
         {{1, 1, 1, 1, 1, 1, 1, 1, 1},
          {1, 1, 1, 1, 1, 1, 1, 2},
          {1, 1, 1, 1, 1, 1, 3},
          {1, 1, 1, 1, 1, 2, 2},
          {1, 1, 1, 1, 2, 3},
          {1, 1, 1, 1, 5},
          {1, 1, 1, 2, 2, 2},
          {1, 1, 1, 3, 3},
          {1, 1, 1, 6},
          {1, 1, 2, 2, 3},
          {1, 1, 2, 5},
          {1, 1, 7},
          {1, 2, 2, 2, 2},
          {1, 2, 3, 3},
          {1, 2, 6},
          {1, 3, 5},
          {2, 2, 2, 3},
          {2, 2, 5},
          {2, 7},
          {3, 3, 3},
          {3, 6}});
    return 0;
}
King0987654/windows2000
private/windows/shell/accesory/ole2pbsh/parentwp.c
/****************************Module*Header******************************\ * Copyright (c) 1987 - 1991 Microsoft Corporation * \***********************************************************************/ /******************************************************** * * * file: ParentWP.c * * system: PC Paintbrush for MS-Windows * * descr: window proc for parent window * * date: 03/18/87 @ 11:00 * * * ********************************************************/ #include <windows.h> #include <port1632.h> #include <shellapi.h> #ifdef DBCS_IME #include <ime.h> #endif #include "oleglue.h" #include "pbrush.h" #ifdef DBCS_IME #include <winnls.h> #endif int UpdateCount = 0; #ifdef DBCS_IME /* IME can only be used when user selects text function */ BOOL bInitialIMEState; BOOL bGetIMEState = FALSE; // 02/12/93 raid #3725 #endif extern HWND pbrushWnd[]; extern int defaultWid, defaultHgt; extern BOOL drawing; extern HWND zoomOutWnd; extern RECT pbrushRct[]; extern BOOL inMagnify, mouseFlag, bZoomedOut; extern BOOL gfDirty; extern TCHAR fileName[]; extern TCHAR noFile[]; extern int theTool, theSize, theForeg; extern BOOL bIsPrinterDefault; extern TCHAR deviceStr[]; extern HPALETTE hPalette; extern BOOL bJustActivated; extern int theBackg; extern DWORD *rgbColor; extern int SizeTable[]; extern int YPosTable[]; extern HWND mouseWnd; extern BOOL TerminateKill; extern BOOL IsCanceled; extern WNDPROC lpMouseDlg, lpColorDlg, lpNullWP; extern BOOL bPrtCreateErr; extern PRINTDLG PD; #define SIZE_FUDGE 7 void FileDragOpen(TCHAR szPath[]); void doDrop(HANDLE wParam, HWND hwnd); long FAR PASCAL ParentWP(HWND hWnd, UINT message, WPARAM wParam, LONG lParam) { static int Thing; static int RepeatCount = 0; static long lastMsgTime = 0; static WORD lastMsgWParam = 0; static BOOL processCntrlI = TRUE; static BOOL printerChanged = FALSE; static int CurrentWindow = PAINTid; HWND theWnd; WORD shiftStates; long lResult; int i, command, answer; HDC hdcPrint, parentDC; HCURSOR oldcsr; RECT trect, trect1; POINT 
tpoint, cursDiff; int x, y; DWORD dwMsgPos; HPALETTE hOldPalette = NULL; BOOL bButtonDown; WORD scrollAmount; switch (message) { case WM_ERRORMSG: SimpleMessage((WORD)wParam, (LPTSTR)lParam, MB_OK | MB_ICONEXCLAMATION); break; /* create and initialize the image and file buffers */ case WM_ACTIVATE: if((GET_WM_ACTIVATE_STATE(wParam, lParam) == 0) && TerminateKill) { DB_OUTF((acDbgBfr,TEXT("wParam = %lx, lParam = %ld WM_ACTIVATE in parentwp\n"), wParam,lParam)); SendMessage(pbrushWnd[PAINTid], WM_TERMINATE, 0, 0L); } else if(GET_WM_ACTIVATE_STATE(wParam,lParam) == 2 && !IsIconic(hWnd)) { GetWindowRect(pbrushWnd[PAINTid], &trect); dwMsgPos = GetMessagePos(); LONG2POINT(dwMsgPos, tpoint); bJustActivated = PtInRect(&trect, tpoint); } lResult = DefWindowProc(hWnd, message, wParam, lParam); if(printerChanged) { PostMessage(pbrushWnd[PARENTid], WM_ERRORMSG, IDSPrinterChange, 0L); printerChanged = FALSE; } return(lResult); break; case WM_CREATE: defaultWid = GetSystemMetrics(SM_CXSCREEN); defaultHgt = GetSystemMetrics(SM_CYSCREEN); break; #ifdef DBCS_IME // control IME status when focus messages are received case WM_SETFOCUS: bInitialIMEState = WINNLSEnableIME(NULL,FALSE); // KKBUGFIX //02/12/93 raid #3725 bGetIMEState = TRUE; break; case WM_KILLFOCUS: // KKBUGFIX //02/12/93 raid #3725 if(bGetIMEState) { WINNLSEnableIME(NULL,bInitialIMEState); bGetIMEState = FALSE; } break; #endif case WM_SYSCOMMAND: if(drawing) break; oldcsr = SetCursor(LoadCursor(NULL, IDC_WAIT)); command = wParam & 0xFFF0; if(!inMagnify && command != SC_MOUSEMENU && command != SC_KEYMENU) { SendMessage(pbrushWnd[PAINTid], WM_TERMINATE, 0, 0L); UpdatImg(); } else if(inMagnify && (command == SC_ICON || command == SC_CLOSE)) { SendMessage(pbrushWnd[PAINTid], WM_ZOOMACCEPT, 0, 0L); SendMessage(pbrushWnd[PAINTid], WM_TERMINATE, 0, 0L); UpdatImg(); } else if(bZoomedOut || (inMagnify && command != SC_MOUSEMENU && command != SC_KEYMENU)) { SendMessage(pbrushWnd[PAINTid], WM_SCROLLINIT, 0, 0L); } else if(command 
== SC_MOUSEMENU || command == SC_KEYMENU) { SendMessage(pbrushWnd[PAINTid], WM_HIDECURSOR, 0, 0L); } SetCursor(oldcsr); return(DefWindowProc(hWnd,message,wParam,lParam)); break; /* enable paste item if bitmap in clipboard */ case WM_INITMENU: { CLIPFORMAT cf; SendMessage(pbrushWnd[PAINTid], WM_HIDECURSOR, 0, 0l); EnableMenuItem(ghMenuFrame, EDITpaste, (!inMagnify && OleClipboardContainsAcceptableFormats(&cf)) ? MF_ENABLED : MF_GRAYED); if(bPrtCreateErr) { bPrtCreateErr = FALSE; GetPrintParms(NULL); } if(!gfInPlace) EnableMenuItem(ghMenuFrame, FILEprint, bPrtCreateErr? MF_GRAYED : MF_ENABLED); break; } case WM_COMMAND: if(pbrushWnd[PARENTid] && (GET_WM_COMMAND_ID(wParam, lParam) != STYLEitalic || processCntrlI)) { MenuCmd(hWnd, GET_WM_COMMAND_ID(wParam, lParam)); } break; case WM_DESTROY: #ifdef DBCS_IME /* backup IME status before we gone */ // KKBUGFIX //02/12/93 raid #3725 if(bGetIMEState) { WINNLSEnableIME(NULL, bInitialIMEState); bGetIMEState = FALSE; } #endif /* delete allocated image and file buffer memory */ FreeImg(); // TerminateShapeLibrary(); Help(hWnd, HELP_QUIT, 0L); if (lpMouseDlg) FreeProcInstance(lpMouseDlg); if (lpColorDlg) FreeProcInstance(lpColorDlg); if (lpNullWP) FreeProcInstance(lpNullWP); if(hPalette) DeleteObject(hPalette); hPalette = NULL; pbrushWnd[PARENTid] = NULL; /* signal parent window invalid */ PostQuitMessage(0); break; case WM_MOVE: /* reposition the mouse window */ if(!gfInPlace && mouseFlag) { GetWindowRect(mouseWnd, &trect); GetWindowRect(pbrushWnd[PARENTid], &trect1); MoveWindow(mouseWnd, trect1.right - 2 * GetSystemMetrics(SM_CXSIZE) - GetSystemMetrics(SM_CXFRAME) - (trect.right - trect.left) - 8, trect1.top + GetSystemMetrics(SM_CYFRAME) - GetSystemMetrics(SM_CYBORDER), trect.right - trect.left, trect.bottom - trect.top, TRUE); } return(DefWindowProc(hWnd,message,wParam,lParam)); break; case WM_SIZE: oldcsr = SetCursor(LoadCursor(NULL, IDC_WAIT)); if(!gfInPlace && wParam != SIZEICONIC) { pbrushRct[PARENTid].right = 
LOWORD(lParam); pbrushRct[PARENTid].bottom = HIWORD(lParam); if(pbrushWnd[PAINTid] && IsWindow(pbrushWnd[PAINTid])) { CalcWnds(NOCHANGEWINDOW, NOCHANGEWINDOW, NOCHANGEWINDOW, NOCHANGEWINDOW); for(i = 1; i < MAXwnds; ++i) { MoveWindow(pbrushWnd[i], pbrushRct[i].left, pbrushRct[i].top, pbrushRct[i].right - pbrushRct[i].left, pbrushRct[i].bottom - pbrushRct[i].top, TRUE); } if(bZoomedOut) { MoveWindow(zoomOutWnd, pbrushRct[PAINTid].left, pbrushRct[PAINTid].top, pbrushRct[PAINTid].right - pbrushRct[PAINTid].left, pbrushRct[PAINTid].bottom - pbrushRct[PAINTid].top, TRUE); } } } SetCursor(oldcsr); break; case WM_CLOSE: case WM_QUERYENDSESSION: oldcsr = SetCursor(LoadCursor(NULL, IDC_WAIT)); SendMessage(pbrushWnd[PAINTid], WM_TERMINATE, 0, 0L); UpdatImg(); if (!(answer = SaveAsNeeded())) break; if(message == WM_CLOSE) { gfUserClose = TRUE; TerminateServer(); } else { SetCursor(oldcsr); return TRUE; } SetCursor(oldcsr); break; case WM_ENDSESSION: /* remove any temporary files */ if(wParam) FreeImg(); break; case WM_ACTIVATEAPP: if(!gfStandalone) break; i = ShowCursor(wParam); DB_OUTF((acDbgBfr,TEXT("wParam = %lx, SC %d, WM_ACTIVATEAPP in parentwp\n"), wParam,i)); oldcsr = SetCursor(LoadCursor(NULL, IDC_WAIT)); if(pbrushWnd[PAINTid]) { if(wParam == 0) { if(!inMagnify && !bZoomedOut) { if(TerminateKill) { SendMessage(pbrushWnd[PAINTid], WM_TERMINATE, 0, 0L); UpdatImg(); } } else SendMessage(pbrushWnd[PAINTid], WM_SCROLLINIT, 0, 0L); } else { BringWindowToTop(pbrushWnd[PAINTid]); InvalidateRect(pbrushWnd[PAINTid], NULL, FALSE); UpdateWindow(pbrushWnd[PAINTid]); } } SetCursor(oldcsr); break; case WM_KEYUP: RepeatCount = 0; if(bZoomedOut && CurrentWindow == PAINTid) theWnd = zoomOutWnd; else theWnd = pbrushWnd[CurrentWindow]; switch(wParam) { case VK_INSERT: message = WM_LBUTTONUP; break; case VK_DELETE: message = WM_RBUTTONUP; break; default: return(0L); break; } wParam = (GetKeyState(VK_CONTROL)&0x8000 ? MK_CONTROL : 0) | (GetKeyState(VK_MBUTTON)&0x8000 ? 
MK_MBUTTON : 0) | (GetKeyState(VK_LBUTTON)&0x8000 ? MK_LBUTTON : 0) | (GetKeyState(VK_RBUTTON)&0x8000 ? MK_RBUTTON : 0) | (GetKeyState(VK_SHIFT )&0x8000 ? MK_SHIFT : 0); GetCursorPos(&tpoint); ScreenToClient(theWnd, &tpoint); lParam = MAKELONG(tpoint.x, tpoint.y); PostMessage(theWnd, message, wParam, lParam); break; case WM_KEYDOWN: processCntrlI = TRUE; shiftStates = (WORD)((GetKeyState(VK_CONTROL)&0x8000 ? MK_CONTROL : 0) | (GetKeyState(VK_MBUTTON)&0x8000 ? MK_MBUTTON : 0) | (GetKeyState(VK_LBUTTON)&0x8000 ? MK_LBUTTON : 0) | (GetKeyState(VK_RBUTTON)&0x8000 ? MK_RBUTTON : 0) | (GetKeyState(VK_SHIFT )&0x8000 ? MK_SHIFT : 0)); bButtonDown = (GetKeyState(VK_INSERT)&0x8000) || (GetKeyState(VK_DELETE)&0x8000) || (GetKeyState(VK_LBUTTON)&0x8000) || (GetKeyState(VK_RBUTTON)&0x8000); GetCursorPos(&tpoint); CurrentWindow = PAINTid; /* in case cursor not in any window */ for(i = 1; i < MAXwnds; ++i) { GetWindowRect(pbrushWnd[i], &trect); if(PtInRect(&trect, tpoint)) { CurrentWindow = i; break; } } if(bZoomedOut && CurrentWindow == PAINTid) theWnd = zoomOutWnd; else theWnd = pbrushWnd[CurrentWindow]; GetWindowRect(theWnd, &trect1); GetClientRect(theWnd, &trect); switch(wParam) { case VK_ESCAPE: if(!bButtonDown && bZoomedOut) SendMessage(zoomOutWnd, message, wParam, lParam); break; case VK_INSERT: if((lParam&(1L<<30)) || (GetKeyState(VK_LBUTTON)&0x8000)) break; message = WM_LBUTTONDOWN; wParam = shiftStates; ScreenToClient(theWnd, &tpoint); lParam = MAKELONG(tpoint.x, tpoint.y); PostMessage(theWnd, message, wParam, lParam); if(GetKeyState(VK_F9) & 0x8000) { PostMessage(theWnd, WM_LBUTTONUP, wParam, lParam); PostMessage(theWnd, WM_LBUTTONDBLCLK, wParam, lParam); } break; case VK_DELETE: if((lParam&(1L<<30)) || (GetKeyState(VK_RBUTTON)&0x8000)) break; message = WM_RBUTTONDOWN; wParam = shiftStates; ScreenToClient(theWnd, &tpoint); lParam = MAKELONG(tpoint.x, tpoint.y); PostMessage(theWnd, message, wParam, lParam); if(GetKeyState(VK_F9) & 0x8000) { PostMessage(theWnd, 
WM_RBUTTONUP, wParam, lParam); PostMessage(theWnd, WM_RBUTTONDBLCLK, wParam, lParam); } break; case VK_HOME: scrollAmount = SB_TOP; goto KeyScroll; case VK_END: scrollAmount = SB_BOTTOM; goto KeyScroll; case VK_NEXT: scrollAmount = SB_PAGEDOWN; goto KeyScroll; case VK_PRIOR: scrollAmount = SB_PAGEUP; goto KeyScroll; KeyScroll: if(!bButtonDown) { PostMessage(pbrushWnd[PAINTid], (WORD)((GetKeyState(VK_SHIFT) & 0x8000) ? WM_HSCROLL : WM_VSCROLL), (WPARAM)scrollAmount, 0l); } break; case VK_RIGHT: RepeatCount++; if(!bButtonDown && (GetKeyState(VK_SHIFT)&0x8000)) { PostMessage(pbrushWnd[PAINTid], WM_HSCROLL, SB_LINEDOWN, 0l); } else { switch (CurrentWindow) { case TOOLid: if(Thing < MAXtools-1) ++Thing; goto MoveToolCurs; case PAINTid: cursDiff.x = RepeatCount; cursDiff.y = 0; goto MovePaintCurs; case SIZEid: goto MoveSizeCurs; case COLORid: if(Thing < MAXcolors - 2) Thing += 2; goto MoveColorCurs; } } break; case VK_LEFT: RepeatCount++; if(!bButtonDown && (GetKeyState(VK_SHIFT) & 0x8000)) { PostMessage(pbrushWnd[PAINTid], WM_HSCROLL, SB_LINEUP, 0l); } else { switch (CurrentWindow) { case TOOLid: if(Thing > 0) --Thing; goto MoveToolCurs; case PAINTid: cursDiff.x = -RepeatCount; cursDiff.y = 0; goto MovePaintCurs; case SIZEid: goto MoveSizeCurs; case COLORid: if(Thing > 1) Thing -= 2; goto MoveColorCurs; } } break; case VK_UP: RepeatCount++; if(!bButtonDown && (GetKeyState(VK_SHIFT) & 0x8000)) { PostMessage(pbrushWnd[PAINTid], WM_VSCROLL, SB_LINEUP, 0l); } else { switch (CurrentWindow) { case TOOLid: if(Thing > 1) Thing -= 2; goto MoveToolCurs; case PAINTid: cursDiff.x = 0; cursDiff.y = -RepeatCount; goto MovePaintCurs; case SIZEid: if(Thing > 0) --Thing; goto MoveSizeCurs; case COLORid: if(Thing > 0) --Thing; goto MoveColorCurs; } } break; case VK_DOWN: RepeatCount++; if(!bButtonDown && (GetKeyState(VK_SHIFT) & 0x8000)) { PostMessage(pbrushWnd[PAINTid], WM_VSCROLL, SB_LINEDOWN, 0l); } else { switch (CurrentWindow) { case TOOLid: if(Thing < MAXtools-2) Thing += 2; goto 
MoveToolCurs; case PAINTid: cursDiff.x = 0; cursDiff.y = RepeatCount; goto MovePaintCurs; case SIZEid: if(Thing < NUM_SIZES-1) ++Thing; goto MoveSizeCurs; case COLORid: if(Thing < MAXcolors-1) ++Thing; goto MoveColorCurs; } } break; case VK_TAB: processCntrlI = FALSE; dwMsgPos = GetMessagePos(); LONG2POINT(dwMsgPos,tpoint); SendMessage(theWnd, WM_LBUTTONUP, shiftStates, MAKELONG((tpoint.x - trect1.left) - GetSystemMetrics(SM_CXBORDER), (tpoint.y - trect1.top) - GetSystemMetrics(SM_CYBORDER))); do { if(!(GetKeyState(VK_SHIFT) & 0x8000)) CurrentWindow = CurrentWindow % 4 + 1; else CurrentWindow = (CurrentWindow + 2) % 4 + 1; if(bZoomedOut && CurrentWindow == PAINTid) theWnd = zoomOutWnd; else theWnd = pbrushWnd[CurrentWindow]; } while (!IsWindowVisible(theWnd)); GetWindowRect(theWnd, &trect1); GetClientRect(theWnd, &trect); switch (CurrentWindow) { case TOOLid: Thing = theTool; goto MoveToolCurs; case PAINTid: cursDiff.x = cursDiff.y = 0; goto MovePaintCurs; case SIZEid: for(Thing=0; SizeTable[Thing]<theSize; ++Thing) ; goto MoveSizeCurs; case COLORid: Thing = theForeg; goto MoveColorCurs; } break; MoveToolCurs: x = (trect.right * ((Thing % 2) * 2 + 1)) / 4; y = (trect.bottom * ((Thing >> 1) * 2 + 1)) / MAXtools; goto MoveMiscCurs; MoveSizeCurs: x = trect.right / 2; y = (int)((long)(YPosTable[Thing] + SizeTable[Thing] / 2) * (trect.bottom - trect.top) / SIZE_EXTY); goto MoveMiscCurs; MoveColorCurs: x = ((((Thing >> 1) << 1) + 5) * (trect.right/ COLORdiv)) / 2; y = (((Thing % 2) * 2 + 1) * trect.bottom) / 4 ; goto MoveMiscCurs; MoveMiscCurs: SetCursorPos(trect1.left + GetSystemMetrics(SM_CXBORDER) + x, trect1.top + GetSystemMetrics(SM_CYBORDER) + y); break; MovePaintCurs: ClientToScreen(theWnd, ((LPPOINT)&trect)); ClientToScreen(theWnd, ((LPPOINT)&trect) + 1); GetCursorPos(&tpoint); tpoint.x += cursDiff.x; tpoint.y += cursDiff.y; tpoint.x = min(max(tpoint.x, trect.left), trect.right - 1); tpoint.y = min(max(tpoint.y, trect.top ), trect.bottom - 1); 
SetCursorPos(tpoint.x, tpoint.y); break; default: break; } break; #ifdef KOREA case WM_INTERIM: #endif case WM_CHAR: SendMessage(pbrushWnd[PAINTid], message, wParam, lParam); break; #ifdef JAPAN // added by Hiraisi case WM_IME_REPORT: return( SendMessage(pbrushWnd[PAINTid], message, wParam, lParam) ); break; #endif case WM_WININICHANGE: oldcsr = SetCursor(LoadCursor(NULL, IDC_WAIT)); if(!bIsPrinterDefault) { if(!(hdcPrint = GetPrtDC())) { bIsPrinterDefault = TRUE; printerChanged = TRUE; hWnd = GetActiveWindow(); for(i = 1; i < MAXwnds; ++i) { if(pbrushWnd[i] == hWnd) printerChanged = FALSE; } if(!printerChanged) { PostMessage(pbrushWnd[PARENTid], WM_ERRORMSG, IDSPrinterChange, 0L); } } else { DeleteDC(hdcPrint); } } /* Let Commdlg retrieve default printer so we can modify printer * settings with hDevMode. */ if(bIsPrinterDefault) GetDefaultPort(); GetPrintParms(NULL); InitDecimal((LPTSTR)lParam); if (mouseFlag) { TCHAR szSep[3]; extern TCHAR sList[]; extern TCHAR szIntl[]; GetProfileString(szIntl, sList, TEXT(","), szSep, CharSizeOf(szSep)); SetDlgItemText(mouseWnd, IDMOUSESEP, szSep); } SetCursor(oldcsr); break; case WM_DROPFILES: /*case added 03/26/91 for file drag/drop support*/ doDrop((HANDLE)wParam,hWnd); break; /* Palette manager stuff... */ case WM_QUERYNEWPALETTE: /* we are about to receive input focus. Return TRUE if we realize palette, FALSE otherwise. 
*/
        /* (WM_QUERYNEWPALETTE, continued) No private palette -> nothing to realize. */
        if(!hPalette) break;
        parentDC = GetDC(hWnd);
        hOldPalette = SelectPalette(parentDC, hPalette, 0);
        x = RealizePalette(parentDC);     /* x = number of entries actually remapped */
        if (hOldPalette)
            SelectPalette(parentDC, hOldPalette, 0);
        ReleaseDC(hWnd, parentDC);
        if(x) {
            /* some colors changed: repaint everything and report success (TRUE) */
            InvalidateRect(hWnd, (LPRECT) NULL, 1);
            return TRUE;
        } else {
            return FALSE;
        }
        break;

    case WM_PALETTECHANGED:
        /* Another window realized its palette; re-realize ours into the system
         * palette (skip when we triggered the change ourselves, wParam == hWnd). */
        if(!hPalette || (HWND)wParam == hWnd) break;
        parentDC = GetDC(hWnd);
        hOldPalette = SelectPalette(parentDC, hPalette, 0);
        x = RealizePalette(parentDC);
        InvalidateRect(hWnd, NULL, FALSE);
        if (hOldPalette)
            SelectPalette(parentDC, hOldPalette, 0);
        ReleaseDC(hWnd, parentDC);
        break;

    default:
        return(DefWindowProc(hWnd,message,wParam,lParam));
        break;
    }
    return(0L);
}

/* SaveAsNeeded - if the image has unsaved changes (gfDirty), give the user a
 * chance to save it before a destructive operation (close, end-session,
 * drag-open of a new file).
 *
 * Returns FALSE (0) when the pending operation must be aborted: the user
 * pressed Cancel, or the save itself was cancelled / failed (IsCanceled).
 * Otherwise returns a non-zero value (IDYES or IDNO) which callers treat as
 * TRUE; note the literal IDYES/IDNO values are returned, not TRUE.
 * When not standalone (embedded/OLE), no prompt is shown and the dirty image
 * is saved unconditionally (wResult stays IDYES).
 */
BOOL FAR SaveAsNeeded(void)
{
    WORD wResult = IDYES;

    iExitWithSaving = IDNO;
    if (gfDirty) {
        if(gfStandalone) {
            /* "Save changes to <file>?" - use placeholder name if untitled */
            wResult = SimpleMessage((WORD)IDSSaveTo, *fileName ? fileName : noFile,
                                    MB_YESNOCANCEL | MB_ICONEXCLAMATION);
        }
        switch (wResult) {
        case IDCANCEL:
            return FALSE;
            break;
        case IDYES:
            SendMessage(pbrushWnd[PARENTid], WM_COMMAND, FILEsave, 0L);
            // IsCanceled is set to TRUE if during FILEsave, the SaveAs
            // dlg is cancelled or file write fails
            if (IsCanceled)
                return FALSE;
            break;
        default:
            /* user chose "No" - discard changes and continue */
            return IDNO;
            break;
        }
    }
    return IDYES;
}

/* Process file drop/drag options. */
/* doDrop - handle WM_DROPFILES: open the first file dropped onto the window,
 * after offering to save the current image. hDrop is released in all cases. */
void doDrop(HANDLE hDrop, HWND hwnd)
{
    TCHAR szPath[MAX_PATH];

    if (DragQueryFile(hDrop, 0xFFFFFFFF, NULL, 0))   /* # of files dropped */
    {
        /* If user dragged/dropped a file regardless of keys pressed
         * at the time, open the first selected file from file
         * manager.
         */
        DragQueryFile(hDrop,0,szPath,MAX_PATH);
        SetActiveWindow(hwnd);
        FileDragOpen(szPath);
    }
    DragFinish(hDrop);   /* Delete structure alocated for WM_DROPFILES*/
}

/* FileDragOpen - load the given file into the editor (used by drag&drop).
 * Aborts silently if the user cancels saving the current image. */
void FileDragOpen(TCHAR szPath[])
{
    if (!SaveAsNeeded())
        return;
    SetupFileVars(szPath);
    SendMessage(pbrushWnd[PARENTid], WM_COMMAND, GET_WM_COMMAND_MPS(FILEload, NULL, 0));
}
nyaxt/ptnk
ptnk_bench.cpp
<filename>ptnk_bench.cpp<gh_stars>1-10 #include "bench_tmpl.h" #include "ptnk.h" using namespace ptnk; void run_bench() { // sleep(3); Bench b("ptnk_bench", comment); { ptnk_opts_t opts = OWRITER | OCREATE | OTRUNCATE | OPARTITIONED; if(do_sync) opts |= OAUTOSYNC; b.start(); DB db(dbfile, opts); b.cp("db init"); int ik = 0; if(NUM_PREW > 0) { while(ik < NUM_PREW) { unique_ptr<DB::Tx> tx(db.newTransaction()); for(int j = 0; j < 100; ++ j) { int k = keys[ik++]; char buf[9]; sprintf(buf, "%08u", k); tx->put(BufferCRef(buf, ::strlen(buf)), BufferCRef(&k, sizeof(int))); } tx->tryCommit(); } // don't include preloading time fprintf(stderr, "prewrite %d keys done\n", ik); b.start(); b.cp("db init"); } Buffer v; for(int itx = 0; itx < NUM_TX; ++ itx) { unique_ptr<DB::Tx> tx(db.newTransaction()); for(int iw = 0; iw < NUM_W_PER_TX; ++ iw) { int k = keys[ik++]; char buf[9]; sprintf(buf, "%08u", k); tx->put(BufferCRef(buf, ::strlen(buf)), BufferCRef(&k, sizeof(int))); } for(int ir = 0; ir < NUM_R_PER_TX; ++ ir) { int k = keys[rand() % ik]; char buf[9]; sprintf(buf, "%08u", k); tx->get(BufferCRef(&buf, ::strlen(buf)), &v); } tx->tryCommit(); if(do_intensiverebase) db.rebase(); // tx->dumpStat(); } b.cp("tx done"); if(NUM_W_PER_TX == 0) { // avoid measuring clean up time b.end();b.dump(); exit(0); } db.rebase(); } b.end(); b.dump(); }
LukasLohoff/shogun
shogun-boot/src/main/java/de/terrestris/shogun/boot/config/BootWebSecurityConfig.java
<gh_stars>1-10
/* SHOGun, https://terrestris.github.io/shogun/
 *
 * Copyright © 2020-present terrestris GmbH & Co. KG
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0.txt
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.terrestris.shogun.boot.config;

import de.terrestris.shogun.config.WebSecurityConfig;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.web.csrf.CookieCsrfTokenRepository;
import org.springframework.security.web.util.matcher.RequestMatcher;

/**
 * Boot-level HTTP security configuration for SHOGun.
 *
 * Grants anonymous access to the public endpoints (auth, info, Swagger docs),
 * restricts operational endpoints (actuator, cache, webhooks) to ADMINs and
 * requires authentication for everything else. Authentication is offered via
 * HTTP Basic and a form login; CSRF protection uses a cookie-based token
 * repository so that JavaScript clients can read the token.
 */
@Configuration
@EnableWebSecurity
public class BootWebSecurityConfig extends WebSecurityConfig {

    /**
     * Matches requests issued from the Swagger UI page (detected via the
     * Referer header) so that CSRF checks can be skipped for them.
     */
    RequestMatcher csrfRequestMatcher = httpServletRequest -> {
        String refererHeader = httpServletRequest.getHeader("Referer");
        return refererHeader != null && refererHeader.endsWith("swagger-ui/index.html");
    };

    @Override
    protected void customHttpConfiguration(HttpSecurity http) throws Exception {
        // Endpoints reachable without any authentication.
        String[] publicEndpoints = {
            "/",
            "/auth/**",
            "/info/**",
            "/index.html",
            // Enable anonymous access to swagger docs
            "/swagger-ui/index.html",
            "/webjars/springfox-swagger-ui/**",
            "/swagger-resources/**",
            "/v2/api-docs"
        };

        // Operational endpoints restricted to administrators.
        String[] adminEndpoints = {
            "/actuator/**",
            "/cache/**",
            "/webhooks/**"
        };

        http
            .authorizeRequests()
                .antMatchers(publicEndpoints)
                    .permitAll()
                .antMatchers(adminEndpoints)
                    .hasRole("ADMIN")
                .anyRequest()
                    .authenticated()
                .and()
            .httpBasic()
                .and()
            .formLogin()
                .defaultSuccessUrl("/index.html")
                .permitAll()
                .and()
            .rememberMe()
                // NOTE(review): hard-coded remember-me key committed to source;
                // consider moving it to external configuration. Changing the
                // value invalidates existing remember-me cookies.
                .key("SuPeRuNiQuErEmEmBeRmEKeY")
                .and()
            .logout()
                .permitAll()
                .and()
            .csrf()
                .csrfTokenRepository(CookieCsrfTokenRepository.withHttpOnlyFalse())
                .ignoringRequestMatchers(csrfRequestMatcher)
                .ignoringAntMatchers("/graphql")
                .ignoringAntMatchers("/actuator/**");
    }
}
ckamtsikis/cmssw
RecoMuon/MuonIdentification/src/MuonCaloCompatibility.cc
// -*- C++ -*-
//
// Package:    MuonIdentification
// Class:      MuonCaloCompatibility
//
/*
 Description: test track muon hypothesis using energy deposition in ECAL,HCAL,HO
*/
//
// Original Author:  <NAME>
//
//

#include "RecoMuon/MuonIdentification/interface/MuonCaloCompatibility.h"
#include "DataFormats/TrackReco/interface/Track.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
#include "TFile.h"

// Load the muon and pion calorimeter-response template histograms (2D:
// energy deposit vs. track momentum, one per eta region and subdetector)
// from the two ROOT files given in the ParameterSet, detach them from the
// files (SetDirectory(nullptr)) so they survive the files' closure, give
// them unique names, and reset the working variables. Must be called before
// evaluate(); sets isConfigured_ on success.
void MuonCaloCompatibility::configure(const edm::ParameterSet& iConfig) {
  const std::string muonfileName = (iConfig.getParameter<edm::FileInPath>("MuonTemplateFileName")).fullPath();
  const std::string pionfileName = (iConfig.getParameter<edm::FileInPath>("PionTemplateFileName")).fullPath();
  TFile muon_templates(muonfileName.c_str(), "READ");
  TFile pion_templates(pionfileName.c_str(), "READ");

  // NOTE(review): TFile::Get returns nullptr if a histogram is missing; the
  // reset() calls below would then install null templates and the subsequent
  // SetDirectory calls would crash — assumes the template files are complete.
  pion_em_etaEmi.reset((TH2D*)pion_templates.Get("em_etaEmi"));
  pion_had_etaEmi.reset((TH2D*)pion_templates.Get("had_etaEmi"));
  pion_em_etaTmi.reset((TH2D*)pion_templates.Get("em_etaTmi"));
  pion_had_etaTmi.reset((TH2D*)pion_templates.Get("had_etaTmi"));
  pion_em_etaB.reset((TH2D*)pion_templates.Get("em_etaB"));
  pion_had_etaB.reset((TH2D*)pion_templates.Get("had_etaB"));
  pion_ho_etaB.reset((TH2D*)pion_templates.Get("ho_etaB"));
  pion_em_etaTpl.reset((TH2D*)pion_templates.Get("em_etaTpl"));
  pion_had_etaTpl.reset((TH2D*)pion_templates.Get("had_etaTpl"));
  pion_em_etaEpl.reset((TH2D*)pion_templates.Get("em_etaEpl"));
  pion_had_etaEpl.reset((TH2D*)pion_templates.Get("had_etaEpl"));
  muon_em_etaEmi.reset((TH2D*)muon_templates.Get("em_etaEmi"));
  muon_had_etaEmi.reset((TH2D*)muon_templates.Get("had_etaEmi"));
  muon_em_etaTmi.reset((TH2D*)muon_templates.Get("em_etaTmi"));
  muon_had_etaTmi.reset((TH2D*)muon_templates.Get("had_etaTmi"));
  muon_em_etaB.reset((TH2D*)muon_templates.Get("em_etaB"));
  muon_had_etaB.reset((TH2D*)muon_templates.Get("had_etaB"));
  muon_ho_etaB.reset((TH2D*)muon_templates.Get("ho_etaB"));
  muon_em_etaTpl.reset((TH2D*)muon_templates.Get("em_etaTpl"));
  muon_had_etaTpl.reset((TH2D*)muon_templates.Get("had_etaTpl"));
  muon_em_etaEpl.reset((TH2D*)muon_templates.Get("em_etaEpl"));
  muon_had_etaEpl.reset((TH2D*)muon_templates.Get("had_etaEpl"));

  // Release from the opened file
  pion_em_etaEmi->SetDirectory(nullptr);
  pion_had_etaEmi->SetDirectory(nullptr);
  pion_em_etaTmi->SetDirectory(nullptr);
  pion_had_etaTmi->SetDirectory(nullptr);
  pion_em_etaB->SetDirectory(nullptr);
  pion_had_etaB->SetDirectory(nullptr);
  pion_ho_etaB->SetDirectory(nullptr);
  pion_em_etaTpl->SetDirectory(nullptr);
  pion_had_etaTpl->SetDirectory(nullptr);
  pion_em_etaEpl->SetDirectory(nullptr);
  pion_had_etaEpl->SetDirectory(nullptr);
  muon_em_etaEmi->SetDirectory(nullptr);
  muon_had_etaEmi->SetDirectory(nullptr);
  muon_em_etaTmi->SetDirectory(nullptr);
  muon_had_etaTmi->SetDirectory(nullptr);
  muon_em_etaB->SetDirectory(nullptr);
  muon_had_etaB->SetDirectory(nullptr);
  muon_ho_etaB->SetDirectory(nullptr);
  muon_em_etaTpl->SetDirectory(nullptr);
  muon_had_etaTpl->SetDirectory(nullptr);
  muon_em_etaEpl->SetDirectory(nullptr);
  muon_had_etaEpl->SetDirectory(nullptr);

  // change names
  // (prefix avoids name clashes in the global ROOT object registry, since the
  // two files use identical histogram names)
  const std::string prefixPion = "MuonCaloCompatibility_pion_";
  pion_em_etaEmi->SetName((prefixPion + pion_em_etaEmi->GetName()).c_str());
  pion_had_etaEmi->SetName((prefixPion + pion_had_etaEmi->GetName()).c_str());
  pion_em_etaTmi->SetName((prefixPion + pion_em_etaTmi->GetName()).c_str());
  pion_had_etaTmi->SetName((prefixPion + pion_had_etaTmi->GetName()).c_str());
  pion_em_etaB->SetName((prefixPion + pion_em_etaB->GetName()).c_str());
  pion_had_etaB->SetName((prefixPion + pion_had_etaB->GetName()).c_str());
  pion_ho_etaB->SetName((prefixPion + pion_ho_etaB->GetName()).c_str());
  pion_em_etaTpl->SetName((prefixPion + pion_em_etaTpl->GetName()).c_str());
  pion_had_etaTpl->SetName((prefixPion + pion_had_etaTpl->GetName()).c_str());
  pion_em_etaEpl->SetName((prefixPion + pion_em_etaEpl->GetName()).c_str());
  pion_had_etaEpl->SetName((prefixPion + pion_had_etaEpl->GetName()).c_str());
  const std::string prefixMuon = "MuonCaloCompatibility_muon_";
  muon_em_etaEmi->SetName((prefixMuon + muon_em_etaEmi->GetName()).c_str());
  muon_had_etaEmi->SetName((prefixMuon + muon_had_etaEmi->GetName()).c_str());
  muon_em_etaTmi->SetName((prefixMuon + muon_em_etaTmi->GetName()).c_str());
  muon_had_etaTmi->SetName((prefixMuon + muon_had_etaTmi->GetName()).c_str());
  muon_em_etaB->SetName((prefixMuon + muon_em_etaB->GetName()).c_str());
  muon_had_etaB->SetName((prefixMuon + muon_had_etaB->GetName()).c_str());
  muon_ho_etaB->SetName((prefixMuon + muon_ho_etaB->GetName()).c_str());
  muon_em_etaTpl->SetName((prefixMuon + muon_em_etaTpl->GetName()).c_str());
  muon_had_etaTpl->SetName((prefixMuon + muon_had_etaTpl->GetName()).c_str());
  muon_em_etaEpl->SetName((prefixMuon + muon_em_etaEpl->GetName()).c_str());
  muon_had_etaEpl->SetName((prefixMuon + muon_had_etaEpl->GetName()).c_str());

  // pb*/ps* are the per-subdetector background (pion) / signal (muon)
  // probabilities: x = ECAL, y = HCAL, z = HO. Reset to sentinel values here.
  pbx = -1;
  pby = -1;
  pbz = -1;
  psx = -1;
  psy = -1;
  psz = -1;
  muon_compatibility = -1;
  use_corrected_hcal = true;
  use_em_special = true;
  isConfigured_ = true;
}

// Returns true if (x, y) falls into an under-/overflow bin of the given 2D
// histogram on either axis — i.e. the lookup point lies outside the template's
// covered range.
bool MuonCaloCompatibility::accessing_overflow(const TH2D& histo, double x, double y) {
  bool access = false;

  if (histo.GetXaxis()->FindBin(x) == 0 || histo.GetXaxis()->FindBin(x) > histo.GetXaxis()->GetNbins()) {
    access = true;
  }
  if (histo.GetYaxis()->FindBin(y) == 0 || histo.GetYaxis()->FindBin(y) > histo.GetYaxis()->GetNbins()) {
    access = true;
  }
  return access;
}

// Compute the calorimeter compatibility of the muon hypothesis for `amuon`:
// the likelihood ratio  P(muon) / (P(muon) + P(pion))  built from the
// per-subdetector template probabilities (ECAL, HCAL, HO) looked up at the
// track momentum and the measured energy deposits.
//
// Returns:
//   -9999   if configure() was not called;
//   0.5     ("don't know") for unphysical momentum or |eta| > 2.5;
//   0.12345 as a sentinel when both ECAL and HCAL deposits are exactly 0
//           (temporary fix for low association efficiency);
//   otherwise a value in (0, 1), larger = more muon-like.
double MuonCaloCompatibility::evaluate(const reco::Muon& amuon) {
  if (!isConfigured_) {
    edm::LogWarning("MuonIdentification") << "MuonCaloCompatibility is not configured! Nothing is calculated.";
    return -9999;
  }

  double eta = 0.;
  double p = 0.;
  double em = 0.;
  double had = 0.;
  double ho = 0.;

  // had forgotten this reset in previous versions 070409
  pbx = 1.;
  pby = 1.;
  pbz = 1.;
  psx = 1.;
  psy = 1.;
  psz = 1.;
  muon_compatibility = -1.;
  pion_template_em = nullptr;
  muon_template_em = nullptr;
  pion_template_had = nullptr;
  muon_template_had = nullptr;
  pion_template_ho = nullptr;
  muon_template_ho = nullptr;

  // 071002: Get either tracker track, or SAmuon track.
  // CaloCompatibility templates may have to be specialized for
  // the use with SAmuons, currently just using the ones produced
  // using tracker tracks.
  const reco::Track* track = nullptr;
  if (!amuon.track().isNull()) {
    track = amuon.track().get();
  } else {
    if (!amuon.standAloneMuon().isNull()) {
      track = amuon.standAloneMuon().get();
    } else {
      throw cms::Exception("FatalError")
          << "Failed to fill muon id calo_compatibility information for a muon with undefined references to tracks";
    }
  }

  if (!use_corrected_hcal) {  // old eta regions, uncorrected energy
    eta = track->eta();
    p = track->p();
    // new 070904: Set lookup momentum to 1999.9 if larger than 2 TeV.
    // Though the templates were produced with p<2TeV, we believe that
    // this approximation should be roughly valid. A special treatment
    // for >1 TeV muons is advisable anyway :)
    if (p >= 2000.)
      p = 1999.9;
    //    p = 10./sin(track->theta()); // use this for templates < 1_5
    if (use_em_special) {
      // em == 0 is mapped to the dedicated -5 "no deposit" template slot
      if (amuon.calEnergy().em == 0.)
        em = -5.;
      else
        em = amuon.calEnergy().em;
    } else {
      em = amuon.calEnergy().em;
    }
    had = amuon.calEnergy().had;
    ho = amuon.calEnergy().ho;
  } else {
    eta = track->eta();
    p = track->p();
    // new 070904: Set lookup momentum to 1999.9 if larger than 2 TeV.
    // Though the templates were produced with p<2TeV, we believe that
    // this approximation should be roughly valid. A special treatment
    // for >1 TeV muons is advisable anyway :)
    if (p >= 2000.)
      p = 1999.9;
    //    p = 10./sin(track->theta()); // use this for templates < 1_5
    // hcal energy is now done where we get the template histograms (to use corrected cal energy)!
    //    had = amuon.calEnergy().had;
    if (use_em_special) {
      if (amuon.calEnergy().em == 0.)
        em = -5.;
      else
        em = amuon.calEnergy().em;
    } else {
      em = amuon.calEnergy().em;
    }
    ho = amuon.calEnergy().ho;
  }

  // Skip everyting and return "I don't know" (i.e. 0.5) for uncovered regions:
  //  if( p < 0. || p > 500.) return 0.5; // removed 500 GeV cutoff 070817 after updating the tempates (v2_0) to have valid entried beyond 500 GeV
  if (p < 0.)
    return 0.5;  // return "unknown" for unphysical momentum input.
  if (fabs(eta) > 2.5)
    return 0.5;
  // temporary fix for low association efficiency:
  // set caloCompatibility to 0.12345 for tracks
  // which have 0 energy in BOTH ecal and hcal
  if (amuon.calEnergy().had == 0.0 && amuon.calEnergy().em == 0.0)
    return 0.12345;

  //  std::cout<<std::endl<<"Input values are: "<<eta <<" "<< p <<" "<< em <<" "<< had <<" "<< ho;

  // depending on the eta, choose correct histogram: (now all for barrel):
  // bad! eta range has to be syncronised with choice for histogram... should be read out from the histo file somehow... 070322
  if (42 != 42) {  // old eta ranges and uncorrected hcal energy
    // (dead branch kept for reference: the historical uncorrected-energy
    // template selection)
    if (eta <= -1.4) {
      //    std::cout<<"Emi"<<std::endl;
      pion_template_em = pion_em_etaEmi;
      pion_template_had = pion_had_etaEmi;
      muon_template_em = muon_em_etaEmi;
      muon_template_had = muon_had_etaEmi;
    } else if (eta > -1.4 && eta <= -1.31) {
      //    std::cout<<"Tmi"<<std::endl;
      pion_template_em = pion_em_etaTmi;
      pion_template_had = pion_had_etaTmi;
      muon_template_em = muon_em_etaTmi;
      muon_template_had = muon_had_etaTmi;
    } else if (eta > -1.31 && eta <= 1.31) {
      //    std::cout<<"B"<<std::endl;
      pion_template_em = pion_em_etaB;
      pion_template_had = pion_had_etaB;
      pion_template_ho = pion_ho_etaB;
      muon_template_em = muon_em_etaB;
      muon_template_had = muon_had_etaB;
      muon_template_ho = muon_ho_etaB;
    } else if (eta > 1.31 && eta <= 1.4) {
      //    std::cout<<"Tpl"<<std::endl;
      pion_template_em = pion_em_etaTpl;
      pion_template_had = pion_had_etaTpl;
      muon_template_em = muon_em_etaTpl;
      muon_template_had = muon_had_etaTpl;
    } else if (eta > 1.4) {
      //    std::cout<<"Epl"<<std::endl;
      pion_template_em = pion_em_etaEpl;
      pion_template_had = pion_had_etaEpl;
      muon_template_em = muon_em_etaEpl;
      muon_template_had = muon_had_etaEpl;
    } else {
      LogTrace("MuonIdentification") << "Some very weird thing happened in MuonCaloCompatibility::evaluate - go figure ;) ";
      return -999;
    }
  } else if (42 == 42) {  // new eta bins, corrected hcal energy
    // HCAL: five eta regions; the deposit is rescaled by a geometry-dependent
    // factor when use_corrected_hcal is set.
    if (track->eta() > 1.27) {
      //       had_etaEpl ->Fill(muon->track().get()->p(),1.8/2.2*muon->calEnergy().had );
      if (use_corrected_hcal)
        had = 1.8 / 2.2 * amuon.calEnergy().had;
      else
        had = amuon.calEnergy().had;
      pion_template_had = pion_had_etaEpl;
      muon_template_had = muon_had_etaEpl;
    }
    if (track->eta() <= 1.27 && track->eta() > 1.1) {
      //       had_etaTpl ->Fill(muon->track().get()->p(),(1.8/(-2.2*muon->track().get()->eta()+5.5))*muon->calEnergy().had );
      if (use_corrected_hcal)
        had = (1.8 / (-2.2 * track->eta() + 5.5)) * amuon.calEnergy().had;
      else
        had = amuon.calEnergy().had;
      pion_template_had = pion_had_etaTpl;
      muon_template_had = muon_had_etaTpl;
    }
    if (track->eta() <= 1.1 && track->eta() > -1.1) {
      //       had_etaB ->Fill(muon->track().get()->p(),sin(muon->track().get()->theta())*muon->calEnergy().had );
      if (use_corrected_hcal)
        had = sin(track->theta()) * amuon.calEnergy().had;
      else
        had = amuon.calEnergy().had;
      pion_template_had = pion_had_etaB;
      muon_template_had = muon_had_etaB;
    }
    if (track->eta() <= -1.1 && track->eta() > -1.27) {
      //       had_etaTmi ->Fill(muon->track().get()->p(),(1.8/(-2.2*muon->track().get()->eta()+5.5))*muon->calEnergy().had );
      if (use_corrected_hcal)
        had = (1.8 / (2.2 * track->eta() + 5.5)) * amuon.calEnergy().had;
      else
        had = amuon.calEnergy().had;
      pion_template_had = pion_had_etaTmi;
      muon_template_had = muon_had_etaTmi;
    }
    if (track->eta() <= -1.27) {
      //       had_etaEmi ->Fill(muon->track().get()->p(),1.8/2.2*muon->calEnergy().had );
      if (use_corrected_hcal)
        had = 1.8 / 2.2 * amuon.calEnergy().had;
      else
        had = amuon.calEnergy().had;
      pion_template_had = pion_had_etaEmi;
      muon_template_had = muon_had_etaEmi;
    }

    // just two eta regions for Ecal (+- 1.479 for barrel, else for rest), no correction:
    //    std::cout<<"We have a muon with an eta of: "<<track->eta()<<std::endl;
    if (track->eta() > 1.479) {
      //       em_etaEpl ->Fill(muon->track().get()->p(),muon->calEnergy().em );
      //       // em_etaTpl ->Fill(muon->track().get()->p(),muon->calEnergy().em );
      ////       em = amuon.calEnergy().em;
      pion_template_em = pion_em_etaEpl;
      muon_template_em = muon_em_etaEpl;
    }
    if (track->eta() <= 1.479 && track->eta() > -1.479) {
      //       em_etaB ->Fill(muon->track().get()->p(),muon->calEnergy().em );
      ////       em = amuon.calEnergy().em;
      pion_template_em = pion_em_etaB;
      muon_template_em = muon_em_etaB;
    }
    if (track->eta() <= -1.479) {
      //       // em_etaTmi ->Fill(muon->track().get()->p(),muon->calEnergy().em );
      //       em_etaEmi ->Fill(muon->track().get()->p(),muon->calEnergy().em );
      ////       em = amuon.calEnergy().em;
      pion_template_em = pion_em_etaEmi;
      muon_template_em = muon_em_etaEmi;
    }

    // just one barrel eta region for the HO, no correction
    //    if( track->eta() < 1.4 && track->eta() > -1.4 ) { // experimenting now...
    if (track->eta() < 1.28 && track->eta() > -1.28) {
      //       ho_etaB ->Fill(muon->track().get()->p(),muon->calEnergy().ho );
      ////       ho = amuon.calEnergy().ho;
      pion_template_ho = pion_ho_etaB;
      muon_template_ho = muon_ho_etaB;
    }
  }

  if (42 != 42) {  // check validity of input template histos and input variables"
    // (dead debug branch)
    pion_template_em->ls();
    pion_template_had->ls();
    if (pion_template_ho)
      pion_template_ho->ls();
    muon_template_em->ls();
    muon_template_had->ls();
    if (muon_template_ho)
      muon_template_ho->ls();
    LogTrace("MuonIdentification") << "Input variables: eta p em had ho "
                                   << "\n"
                                   << eta << " " << p << " " << em << " " << had << " " << ho << " "
                                   << "\n"
                                   << "cal uncorr: em had ho "
                                   << "\n"
                                   << eta << " " << p << " " << amuon.calEnergy().em << " " << amuon.calEnergy().had
                                   << " " << amuon.calEnergy().ho;
  }

  // Look up Compatibility by, where x is p and y the energy.
  // We have a set of different histograms for different regions of eta.
  // need error meassage in case the template histos are missing / the template file is not present!!! 070412
  // For each subdetector: if the (p, energy) point lies outside the template's
  // range, both signal and background probabilities default to 1 (neutral);
  // otherwise they are read from the matching template bin.
  if (pion_template_em) {  // access ecal background template
    if (accessing_overflow(*pion_template_em, p, em)) {
      pbx = 1.;
      psx = 1.;
      LogTrace("MuonIdentification") << " // Message: trying to access overflow bin in MuonCompatibility "
                                        "template for ecal - defaulting signal and background ";
      LogTrace("MuonIdentification") << " // template value to 1. " << pion_template_em->GetName() << " e: " << em
                                     << " p: " << p;
    } else
      pbx = pion_template_em->GetBinContent(pion_template_em->GetXaxis()->FindBin(p),
                                            pion_template_em->GetYaxis()->FindBin(em));
  }
  if (pion_template_had) {  // access hcal background template
    if (accessing_overflow(*pion_template_had, p, had)) {
      pby = 1.;
      psy = 1.;
      LogTrace("MuonIdentification") << " // Message: trying to access overflow bin in MuonCompatibility "
                                        "template for hcal - defaulting signal and background ";
      LogTrace("MuonIdentification") << " // template value to 1. " << pion_template_had->GetName() << " e: " << had
                                     << " p: " << p;
    } else
      pby = pion_template_had->GetBinContent(pion_template_had->GetXaxis()->FindBin(p),
                                             pion_template_had->GetYaxis()->FindBin(had));
  }
  if (pion_template_ho) {  // access ho background template
    if (accessing_overflow(*pion_template_ho, p, ho)) {
      pbz = 1.;
      psz = 1.;
      LogTrace("MuonIdentification") << " // Message: trying to access overflow bin in MuonCompatibility "
                                        "template for ho - defaulting signal and background ";
      // NOTE(review): this message prints `em` although it concerns the HO
      // template — looks like a copy/paste slip in the log output.
      LogTrace("MuonIdentification") << " // template value to 1. " << pion_template_ho->GetName() << " e: " << em
                                     << " p: " << p;
    } else
      pbz = pion_template_ho->GetBinContent(pion_template_ho->GetXaxis()->FindBin(p),
                                            pion_template_ho->GetYaxis()->FindBin(ho));
  }
  if (muon_template_em) {  // access ecal background template
    if (accessing_overflow(*muon_template_em, p, em)) {
      psx = 1.;
      pbx = 1.;
      LogTrace("MuonIdentification") << " // Message: trying to access overflow bin in MuonCompatibility "
                                        "template for ecal - defaulting signal and background ";
      LogTrace("MuonIdentification") << " // template value to 1. " << muon_template_em->GetName() << " e: " << em
                                     << " p: " << p;
    } else
      psx = muon_template_em->GetBinContent(muon_template_em->GetXaxis()->FindBin(p),
                                            muon_template_em->GetYaxis()->FindBin(em));
  }
  if (muon_template_had) {  // access hcal background template
    if (accessing_overflow(*muon_template_had, p, had)) {
      psy = 1.;
      pby = 1.;
      LogTrace("MuonIdentification") << " // Message: trying to access overflow bin in MuonCompatibility "
                                        "template for hcal - defaulting signal and background ";
      LogTrace("MuonIdentification") << " // template value to 1. " << muon_template_had->GetName() << " e: " << had
                                     << " p: " << p;
    } else
      psy = muon_template_had->GetBinContent(muon_template_had->GetXaxis()->FindBin(p),
                                             muon_template_had->GetYaxis()->FindBin(had));
  }
  if (muon_template_ho) {  // access ho background template
    if (accessing_overflow(*muon_template_ho, p, ho)) {
      psz = 1.;
      pbz = 1.;
      LogTrace("MuonIdentification") << " // Message: trying to access overflow bin in MuonCompatibility "
                                        "template for ho - defaulting signal and background ";
      LogTrace("MuonIdentification") << " // template value to 1. " << muon_template_ho->GetName() << " e: " << ho
                                     << " p: " << p;
    } else
      psz = muon_template_ho->GetBinContent(muon_template_ho->GetXaxis()->FindBin(p),
                                            muon_template_ho->GetYaxis()->FindBin(ho));
  }

  // erm - what is this?!?! How could the HO probability be less than 0????? Do we want this line!?!?
  if (psz <= 0.)
    psz = 1.;
  if (pbz <= 0.)
    pbz = 1.;

  // Protection agains empty bins - set cal part to neutral if the bin of the template is empty
  // (temporary fix, a proper extrapolation would be better)
  if (psx == 0. || pbx == 0.) {
    psx = 1.;
    pbx = 1.;
  }
  if (psy == 0. || pby == 0.) {
    psy = 1.;
    pby = 1.;
  }
  if (psz == 0. || pbz == 0.) {
    psz = 1.;
    pbz = 1.;
  }

  // There are two classes of events that deliver 0 for the hcal or ho energy:
  // 1) the track momentum is so low that the extrapolation tells us it should not have reached the cal
  // 2) the crossed cell had an reading below the readout cuts.
  // The 2nd case discriminates between muons and hadrons, the 1st not. Thus for the time being,
  // we set the returned ps and pb to 0 for these cases.
  // We need to have a return value different from 0 for the 1st case in the long run.
  if (had == 0.0) {
    psy = 1.;
    pby = 1.;
  }
  if (ho == 0.0) {
    psz = 1.;
    pbz = 1.;
  }

  // Set em to neutral if no energy in em or negative energy measured.
  // (These cases might indicate problems in the ecal association or readout?! The only
  // hint so far: for critical eta region (eta in [1.52, 1.64]) have negative em values.)
  if (em <= 0. && !use_em_special) {
    pbx = 1.;
    psx = 1.;
  }

  // Final likelihood ratio: signal / (signal + background), subdetector
  // probabilities combined as a product.
  if ((psx * psy * psz + pbx * pby * pbz) > 0.)
    muon_compatibility = psx * psy * psz / (psx * psy * psz + pbx * pby * pbz);
  else {
    LogTrace("MuonIdentification") << "Divide by 0 - defaulting consistency to 0.5 (neutral)!!";
    muon_compatibility = 0.5;
    LogTrace("MuonIdentification") << "Input variables: eta p em had ho "
                                   << "\n"
                                   << eta << " " << p << " " << em << " " << had << " " << ho << " "
                                   << "\n"
                                   << "cal uncorr: em had ho "
                                   << "\n"
                                   << eta << " " << p << " " << amuon.calEnergy().em << " " << amuon.calEnergy().had
                                   << " " << amuon.calEnergy().ho;
  }
  return muon_compatibility;
}
school-engagements/pikater
lib/jade-4.3.2/examples/mobile/GetAvailableLocationsBehaviour.java
/***************************************************************** JADE - Java Agent DEvelopment Framework is a framework to develop multi-agent systems in compliance with the FIPA specifications. Copyright (C) 2000 CSELT S.p.A. GNU Lesser General Public License This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, version 2.1 of the License. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *****************************************************************/ package examples.mobile; import jade.util.leap.*; import jade.proto.*; import jade.lang.acl.*; import jade.domain.JADEAgentManagement.*; import jade.domain.mobility.MobilityOntology; import jade.domain.FIPANames; import jade.content.lang.Codec; import jade.content.lang.sl.SLCodec; import jade.core.*; import jade.content.onto.OntologyException; import jade.content.onto.basic.Action; import jade.content.onto.basic.Result; /* * This behaviour extends SimpleAchieveREInitiator in order * to request to the AMS the list of available locations where * the agent can move. * Then, it displays these locations into the GUI * @author <NAME> - CSELT S.p.A. 
 * @version $Date: 2003-02-25 13:29:42 +0100 (mar, 25 feb 2003) $ $Revision: 3687 $
 */
public class GetAvailableLocationsBehaviour extends SimpleAchieveREInitiator {

  // The REQUEST message sent to the AMS. It is fetched from the behaviour's
  // DataStore (under REQUEST_KEY) so that reset(request) reuses the very
  // message object the protocol machinery already tracks.
  private ACLMessage request;

  /**
   * Builds the FIPA-Request interaction that asks the AMS for the list of
   * platform locations (QueryPlatformLocationsAction, SL0 content language,
   * MobilityOntology).
   */
  public GetAvailableLocationsBehaviour(MobileAgent a) {
    // call the constructor of FipaRequestInitiatorBehaviour
    super(a, new ACLMessage(ACLMessage.REQUEST));
    request = (ACLMessage)getDataStore().get(REQUEST_KEY);
    // fills all parameters of the request ACLMessage
    request.clearAllReceiver();
    request.addReceiver(a.getAMS());
    request.setLanguage(FIPANames.ContentLanguage.FIPA_SL0);
    request.setOntology(MobilityOntology.NAME);
    request.setProtocol(FIPANames.InteractionProtocol.FIPA_REQUEST);
    // creates the content of the ACLMessage: an Action wrapping the
    // query-platform-locations request, addressed to the AMS
    try {
      Action action = new Action();
      action.setActor(a.getAMS());
      action.setAction(new QueryPlatformLocationsAction());
      a.getContentManager().fillContent(request, action);
    } catch(Exception fe) {
      fe.printStackTrace();
    }
    // creates the Message Template
    // template = MessageTemplate.and(MessageTemplate.MatchOntology(MobilityOntology.NAME),template);
    // reset the SimpleAchieveREInitiator in order to put new values
    // for the request ACLMessage and the template
    reset(request);
  }

  // Protocol callbacks below simply log the unexpected replies.

  protected void handleNotUnderstood(ACLMessage reply) {
    System.out.println(myAgent.getLocalName()+ " handleNotUnderstood : "+reply.toString());
  }

  protected void handleRefuse(ACLMessage reply) {
    System.out.println(myAgent.getLocalName()+ " handleRefuse : "+reply.toString());
  }

  protected void handleFailure(ACLMessage reply) {
    System.out.println(myAgent.getLocalName()+ " handleFailure : "+reply.toString());
  }

  protected void handleAgree(ACLMessage reply) {
    // Nothing to do on AGREE; we wait for the INFORM.
  }

  /**
   * Extracts the Result from the AMS answer and pushes the location list
   * into the agent's GUI.
   */
  protected void handleInform(ACLMessage inform) {
    // NOTE(review): 'content' is extracted but never used — kept for
    // byte-compatibility; confirm before removing.
    String content = inform.getContent();
    //System.out.println(inform.toString());
    try {
      Result results = (Result)myAgent.getContentManager().extractContent(inform);
      //update the GUI
      ((MobileAgent)myAgent).gui.updateLocations(results.getItems().iterator());
    } catch(Exception e) {
      e.printStackTrace();
    }
  }
}
k-r-g/gemini
gemini/src/main/java/com/techempower/audit/AuditManager.java
/*******************************************************************************
 * Copyright (c) 2018, TechEmpower, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name TechEmpower, Inc. nor the names of its
 *       contributors may be used to endorse or promote products derived from
 *       this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL TECHEMPOWER, INC. BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
 * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *******************************************************************************/
package com.techempower.audit;

import java.util.*;

import com.techempower.*;
import com.techempower.log.*;

/**
 * AuditManager is the primary means of interaction with the com.techempower
 * auditing functionality.  AuditManager is responsible for constructing and
 * returning AuditSession references for use within an application.  The
 * typical usage is to attach an AuditSession to the current thread by
 * calling newSession, add audits through the audit method, and then
 * ultimately call commit to save audits.
 *   <p>
 * When using within a Gemini application, generally a getAuditManager method
 * is added to the GeminiApplication subclass, and the following code is added
 * to the Dispatcher subclass:
 *   <p><code><pre>
 * public boolean dispatch(Context context)
 * {
 *   try
 *   {
 *     // Get the current user and provide that as the cause.
 *     application.getAuditManager().newSession(getUser(context));
 *     // Dispatch as normal.
 *     return super.dispatch(context);
 *   }
 *   finally
 *   {
 *     // At the end of the request, commit the audits, if any were made.
 *     AuditManager.commit();
 *   }
 * }
 * </pre></code></p>
 *   <p>
 * An implementation of getUser that returns a user that implements Auditable
 * will also be necessary.
 */
public class AuditManager
{

  //
  // Constants.
  //

  public static final String COMPONENT_CODE = "audt";
  public static final int    DEFAULT_MAXIMUM_VALUE_LENGTH = 100;

  //
  // Static variables.
  //

  // One AuditSession per thread; attached by newSession and read back by the
  // static pass-through methods (audit, commit, clear, size).
  private static final ThreadLocal<AuditSession> SESSION_BY_THREAD = new ThreadLocal<>();

  //
  // Member variables.
  //

  private final ComponentLog        log;
  private       int                 maximumValueLength = DEFAULT_MAXIMUM_VALUE_LENGTH;
  private final List<AuditListener> listeners;

  //
  // Member methods.
  //

  /**
   * Construct a new AuditManager.
   */
  public AuditManager(TechEmpowerApplication application)
  {
    log = application.getLog(COMPONENT_CODE);
    listeners = new ArrayList<>(2);
  }

  /**
   * @return Returns the maximumValueLength.
   */
  public int getMaximumValueLength()
  {
    return maximumValueLength;
  }

  /**
   * @param maximumValueLength The maximumValueLength to set.
   */
  public void setMaximumValueLength(int maximumValueLength)
  {
    this.maximumValueLength = maximumValueLength;
  }

  /**
   * Gets the log reference.
   */
  public ComponentLog getLog()
  {
    return log;
  }

  /**
   * Constructs a new AuditSession, but does not attach it to the current
   * thread.  Use this method if your usage pattern will rely on passing
   * the AuditSession through the call stack so that it is available to
   * all code that needs to make audits.
   *   <p>
   * See also newSession, which automatically attaches the session to the
   * current thread, allowing future calls to getSession to return the same
   * session during a thread's execution.
   */
  public AuditSession constructSession(Auditable cause)
  {
    return new AuditSession(this, cause);
  }

  /**
   * Gets the current session (uses ThreadLocal).  May return null if no
   * session has been attached to this thread.
   */
  public static AuditSession getSession()
  {
    return SESSION_BY_THREAD.get();
  }

  /**
   * Attaches a new AuditSession to the current thread.
   */
  public void newSession(Auditable cause)
  {
    SESSION_BY_THREAD.set(constructSession(cause));
  }

  /**
   * Gets the count of the current thread's audits.  Returns 0 when no
   * session is attached.
   */
  public static int size()
  {
    AuditSession session = getSession();
    if (session != null)
    {
      return session.size();
    }
    return 0;
  }

  /**
   * Returns true if there is a current session and it has audits accumulated.
   * Returns false if the audit list is empty or if there is no current
   * session.
   */
  public static boolean hasAudits()
  {
    return (size() > 0);
  }

  /**
   * Commits the current thread's session, if one is attached.  Does nothing
   * when there is no active session; whether a repeated commit has any
   * effect is decided by AuditSession.commit itself.
   */
  public static void commit()
  {
    AuditSession session = getSession();
    if (session != null)
    {
      session.commit();
    }
  }

  /**
   * Clears the current thread's session.  Does nothing when there is no
   * active session.
   */
  public static void clear()
  {
    AuditSession session = getSession();
    if (session != null)
    {
      session.clear();
    }
  }

  /**
   * A pass-through to the current AuditSession's add method.  This uses
   * getSession to get the session attached to the current Thread.
   */
  public static void audit(int type, Auditable affected, int attributeID,
    String originalValue, String newValue)
  {
    AuditSession session = getSession();
    if (session != null)
    {
      session.add(type, affected, attributeID, originalValue, newValue);
    }
  }

  /**
   * A pass-through to the current AuditSession's add method.  This uses
   * getSession to get the session attached to the current Thread.
   */
  public static void audit(int type, Auditable affected, int attributeID,
    Date originalValue, Date newValue)
  {
    AuditSession session = getSession();
    if (session != null)
    {
      session.add(type, affected, attributeID, originalValue, newValue);
    }
  }

  /**
   * A pass-through to the current AuditSession's add method.  This uses
   * getSession to get the session attached to the current Thread.
   */
  public static void audit(int type, Auditable affected, int attributeID,
    long originalValue, long newValue)
  {
    AuditSession session = getSession();
    if (session != null)
    {
      session.add(type, affected, attributeID, originalValue, newValue);
    }
  }

  /**
   * A pass-through to the current AuditSession's add method.  This uses
   * getSession to get the session attached to the current Thread.
   */
  public static void audit(int type, Auditable affected, int attributeID,
    float originalValue, float newValue)
  {
    AuditSession session = getSession();
    if (session != null)
    {
      session.add(type, affected, attributeID, originalValue, newValue);
    }
  }

  /**
   * A pass-through to the current AuditSession's add method.  This uses
   * getSession to get the session attached to the current Thread.
   */
  public static void audit(int type, Auditable affected, int attributeID,
    boolean originalValue, boolean newValue)
  {
    AuditSession session = getSession();
    if (session != null)
    {
      session.add(type, affected, attributeID, originalValue, newValue);
    }
  }

  /**
   * A pass-through to the current AuditSession's add method.  This uses
   * getSession to get the session attached to the current Thread.
   */
  public static void audit(int type, Auditable affected, int attributeID,
    Object originalValue, Object newValue)
  {
    AuditSession session = getSession();
    if (session != null)
    {
      session.add(type, affected, attributeID, originalValue, newValue);
    }
  }

  /**
   * Register a new listener.  This method is not threadsafe.
   */
  public void addListener(AuditListener listener)
  {
    listeners.add(listener);
    log.log("Listener registered: " + listener.getAuditListenerName());
  }

  /**
   * De-register a listener.  This method is not threadsafe.
   */
  public void removeListener(AuditListener listener)
  {
    listeners.remove(listener);
    log.log("Listener de-registered: " + listener.getAuditListenerName());
  }

  /**
   * Commit a session of audits.  Notifies every registered listener of the
   * commit, then of each individual audit, then of completion.  Synchronizes
   * on the session so concurrent commits of the same session serialize.
   */
  protected void commitSession(AuditSession session)
  {
    synchronized (session)
    {
      if (!listeners.isEmpty())
      {
        // Notify the listeners of a commit.
        Iterator<AuditListener> listen = listeners.iterator();
        AuditListener listener;
        while (listen.hasNext())
        {
          listener = listen.next();
          listener.auditSessionCommitted(session);
        }

        // Notify of each audit.
        Audit audit;
        for (int i = 0; i < session.size(); i++)
        {
          audit = session.get(i);
          listen = listeners.iterator();
          while (listen.hasNext())
          {
            listener = listen.next();
            listener.auditCommitted(session, audit);
          }
        }

        // Notify of completion of audit session.
        listen = listeners.iterator();
        while (listen.hasNext())
        {
          listener = listen.next();
          listener.auditSessionCommitComplete(session);
        }
      }
      // listeners is 0 in size?  That's no good.
      else
      {
        log.log("No AuditListeners are defined!  Audit information will not be recorded.");
      }
    }
  }

}   // End AuditManager.
xiumu2017/mall
mall-mbg/src/main/java/com/macro/mall/service/YxxOrderCommonService.java
<filename>mall-mbg/src/main/java/com/macro/mall/service/YxxOrderCommonService.java
package com.macro.mall.service;

import com.github.pagehelper.PageHelper;
import com.macro.mall.common.api.CommonPage;
import com.macro.mall.common.constant.OrderType;
import com.macro.mall.common.service.impl.DistributorService;
import com.macro.mall.dao.YxxOrderCommonDao;
import com.macro.mall.domain.OrderQuery;
import com.macro.mall.domain.YxxOrderDetail;
import com.macro.mall.domain.YxxOrderInfo;
import com.macro.mall.domain.YxxOrderStatusRecordInfo;
import com.macro.mall.enums.OrderStatusUtil;
import com.macro.mall.example.YxxOrderStatusRecordExample;
import com.macro.mall.example.YxxRepairRecordExample;
import com.macro.mall.example.YxxWorkerLevelExample;
import com.macro.mall.mapper.*;
import com.macro.mall.model.*;
import lombok.AllArgsConstructor;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import static com.macro.mall.enums.OrderStatus.*;

/**
 * Shared order operations: per-worker order quota counting and checking,
 * order detail assembly, and order list queries.
 *
 * @author Paradise
 */
@Service
@AllArgsConstructor
public class YxxOrderCommonService {

    private final YxxMemberMapper memberMapper;
    private final YxxRepairRecordMapper repairRecordMapper;
    private final YxxOrderStatusRecordMapper orderStatusRecordMapper;
    private final DistributorService distributorService;
    private final YxxOrderCommonDao orderCommonDao;
    private final YxxWorkerMapper workerMapper;
    private final YxxOrderMapper orderMapper;
    private final YxxWorkerLevelMapper workerLevelMapper;
    private final YxxWorkerOrderCountMapper workerOrderCountMapper;
    private final PmsProductMapper productMapper;

    /**
     * Returns the order counters for a worker, creating a zeroed counter
     * record on first access.
     */
    public YxxWorkerOrderCount countByWorkerId(Long workerId) {
        YxxWorkerOrderCount count = workerOrderCountMapper.selectByPrimaryKey(workerId);
        if (count == null) {
            count = YxxWorkerOrderCount.builder().assignAmount(0).distributeAmount(0).rushAmount(0)
                    .rushAmountBargain(0).workerId(workerId).build();
            workerOrderCountMapper.insert(count);
        }
        return count;
    }

    /**
     * Computes the remaining quota per channel: level limit minus the
     * worker's used count.  Returns an empty counter when the worker or
     * the worker's level is missing.
     */
    public YxxWorkerOrderCount countLeftByWorkerId(YxxWorker worker) {
        YxxWorkerOrderCount leftCount = new YxxWorkerOrderCount();
        if (worker == null || worker.getLevelId() == null) {
            return leftCount;
        }
        YxxWorkerLevel workerLevel = this.getLevel(worker.getLevelId());
        YxxWorkerOrderCount count = this.countByWorkerId(worker.getId());
        leftCount.setWorkerId(worker.getId());
        leftCount.setAssignAmount(workerLevel.getOrderAssignAmount() - count.getAssignAmount());
        leftCount.setDistributeAmount(workerLevel.getOrderDistributeAmount() - count.getDistributeAmount());
        leftCount.setRushAmount(workerLevel.getOrderRushAmount() - count.getRushAmount());
        leftCount.setRushAmountBargain(workerLevel.getOrderRushAmountBargain() - count.getRushAmountBargain());
        return leftCount;
    }

    public YxxWorkerLevel getLevel(Integer levelId) {
        return workerLevelMapper.selectByPrimaryKey(levelId);
    }

    /** Lists all enabled worker levels. */
    public List<YxxWorkerLevel> getLevelList() {
        return workerLevelMapper.selectByExample(new YxxWorkerLevelExample().createCriteria().andEnableEqualTo(1).example());
    }

    public boolean check(YxxOrder order, Long workerId) {
        return this.check(order, workerMapper.selectByPrimaryKey(workerId));
    }

    /**
     * Checks whether the worker still has quota for one more order of this
     * order's channel (distribute / rush / bargain-rush / manual assign).
     *
     * @return true when taking this order would not exceed the level limit
     */
    public boolean check(YxxOrder order, YxxWorker worker) {
        YxxWorkerLevel workerLevel = this.getLevel(worker.getLevelId());
        YxxWorkerOrderCount count = this.countByWorkerId(worker.getId());
        // Determine the order type (distribution channel).
        if (order.getOrderType() == OrderType.SYSTEM_DISTRIBUTE.val()) {
            // System-distributed order
            return workerLevel.getOrderDistributeAmount() >= count.getDistributeAmount() + 1;
        }
        if (order.getOrderType() == OrderType.SYSTEM_RUSH.val()) {
            // Rush (grab) order; bargain orders have their own limit and counter.
            if (order.getIsBargain() == 1) {
                // BUGFIX: compare the bargain limit against the bargain counter.
                // Previously this used count.getRushAmount() (the plain rush
                // counter), inconsistent with countLeftByWorkerId() and
                // updateWorkerOrderCount(), which both track bargain rushes
                // in rushAmountBargain.
                return workerLevel.getOrderRushAmountBargain() >= count.getRushAmountBargain() + 1;
            } else {
                return workerLevel.getOrderRushAmount() >= count.getRushAmount() + 1;
            }
        }
        if (order.getOrderType() == OrderType.MANUAL_DISTRIBUTE.val()) {
            return workerLevel.getOrderAssignAmount() >= count.getAssignAmount() + 1;
        }
        return true;
    }

    /**
     * Increments the counter that matches the order's channel.
     *
     * @return number of rows updated (0 when the order type is unknown)
     */
    public int updateWorkerOrderCount(Long workerId, YxxOrder order) {
        // Determine the order type (distribution channel).
        if (order.getOrderType() == OrderType.SYSTEM_DISTRIBUTE.val()) {
            // System-distributed order
            return orderCommonDao.updateWorkerOrderDistributeCount(workerId);
        }
        if (order.getOrderType() == OrderType.SYSTEM_RUSH.val()) {
            // Rush (grab) order
            if (order.getIsBargain() == 1) {
                return orderCommonDao.updateWorkerOrderRushBargainCount(workerId);
            }
            return orderCommonDao.updateWorkerOrderRushCount(workerId);
        }
        if (order.getOrderType() == OrderType.MANUAL_DISTRIBUTE.val()) {
            return orderCommonDao.updateWorkerOrderAssignCount(workerId);
        }
        return 0;
    }

    /**
     * Assembles the full order detail: order info, items, repair record,
     * status-change history, member, worker and product (without the heavy
     * detailHtml column).
     */
    public YxxOrderDetail detail(Long orderId) {
        YxxOrderDetail detail = new YxxOrderDetail();
        // Order info
        YxxOrderInfo orderInfo = orderCommonDao.queryInfoById(orderId);
        orderInfo.setOrderStatusName(OrderStatusUtil.getStatusName(orderInfo.getOrderStatus()));
        detail.setYxxOrder(orderInfo);
        // Order items
        detail.setItemList(orderCommonDao.queryOrderItemList(orderId));
        // Repair work record
        detail.setRepairRecord(repairRecordMapper.selectOneByExample(new YxxRepairRecordExample().createCriteria()
                .andOrderIdEqualTo(orderId).example()));
        // Order status change history, newest first
        List<YxxOrderStatusRecord> recordList = orderStatusRecordMapper.selectByExample(new YxxOrderStatusRecordExample()
                .createCriteria().andOrderIdEqualTo(orderId).example().orderBy(YxxOrderStatusRecord.Column.createTime.desc()));
        List<YxxOrderStatusRecordInfo> recordInfoList = new ArrayList<>();
        for (YxxOrderStatusRecord record : recordList) {
            recordInfoList.add(new YxxOrderStatusRecordInfo(record));
        }
        detail.setOrderStatusRecordList(recordInfoList);
        detail.setMember(memberMapper.selectByPrimaryKey(orderInfo.getMemberId()));
        detail.setWorker(workerMapper.selectByPrimaryKey(orderInfo.getWorkerId()));
        PmsProduct product = productMapper.selectByPrimaryKeySelective(orderInfo.getProductId(),
                PmsProduct.Column.excludes(PmsProduct.Column.detailHtml));
        detail.setProduct(product);
        return detail;
    }

    public CommonPage<YxxOrderInfo> pageQueryInfo(Integer status, Integer pageNum, Integer pageSize) {
        PageHelper.startPage(pageNum, pageSize);
        List<YxxOrderInfo> orderInfoList = orderCommonDao.queryList(null, getOrderStatusArray(status));
        return CommonPage.restPage(orderInfoList);
    }

    /**
     * Paged query for the admin console.
     *
     * @param query    query parameters {@link OrderQuery}
     * @param pageNum  page number
     * @param pageSize page size
     * @return {@link YxxOrderInfo}
     */
    public CommonPage<YxxOrderInfo> pageQueryInfo(OrderQuery query, Integer pageNum, Integer pageSize) {
        PageHelper.startPage(pageNum, pageSize);
        List<YxxOrderInfo> orderInfoList = orderCommonDao.queryListByOrderQuery(query);
        this.dealStatusDesc(orderInfoList);
        return CommonPage.restPage(orderInfoList);
    }

    /**
     * Fills in the human-readable status name for each order.
     *
     * @param orderInfoList order list
     */
    private void dealStatusDesc(List<YxxOrderInfo> orderInfoList) {
        if (!orderInfoList.isEmpty()) {
            orderInfoList.forEach(yxxOrderInfo -> yxxOrderInfo.setOrderStatusName(
                    OrderStatusUtil.getStatusName(yxxOrderInfo.getOrderStatus())));
        }
    }

    /**
     * Maps a coarse UI status group (1..4) to the underlying order statuses.
     */
    private Integer[] getOrderStatusArray(Integer status) {
        if (status == null) {
            return new Integer[0];
        }
        // 1: pending confirmation
        if (status == 1) {
            return new Integer[]{CREATED.val(), DISTRIBUTING.val(), DISTRIBUTED.val(), RECEIVED.val()};
        }
        // 2: in progress
        if (status == 2) {
            return new Integer[]{WAITED_GET.val(), SET_OFF.val(), ARRIVED.val(), OFFERED.val(), OFFER_CONFIRMED.val(),
                    REPAIRED.val(), REPAIRING.val()};
        }
        // 3: paid, awaiting review
        if (status == 3) {
            return new Integer[]{PAYED.val()};
        }
        // 4: finished or cancelled
        if (status == 4) {
            return new Integer[]{FREE_CANCEL.val(), ARRIVED_CANCEL.val(), CS_CANCEL.val(), COMPLETED.val()};
        }
        return new Integer[0];
    }

    /**
     * Lists the orders assigned to a worker that await acceptance.
     *
     * @param worker the worker
     * @return order list
     */
    public List<YxxOrderInfo> queryDistributeOrder(YxxWorker worker) {
        List<YxxOrderInfo> orderInfoList = orderCommonDao.queryInfoByWorkerId(worker.getId(), DISTRIBUTED.val());
        this.dealStatusDesc(orderInfoList);
        return orderInfoList;
    }

    /** Paged list of orders currently open for grabbing (rush orders). */
    public List<YxxOrderInfo> queryRushOrders(Integer pageNum, Integer pageSize) {
        List<Long> ids = distributorService.getRushOrderIds();
        if (ids == null || ids.isEmpty()) {
            return new ArrayList<>();
        }
        PageHelper.startPage(pageNum, pageSize);
        List<YxxOrderInfo> orderInfoList = orderCommonDao.queryRushOrders(ids, DISTRIBUTING.val());
        this.dealStatusDesc(orderInfoList);
        return orderInfoList;
    }

    /**
     * Manually assigns an order to a worker (admin action).
     *
     * @param orderId  order id
     * @param workerId worker id
     * @return number of rows updated; 0 when the order is not a manual-assign
     *         order, is not in DISTRIBUTING state, or the worker's daily
     *         quota is exhausted
     */
    public int distribute(Long orderId, Long workerId) {
        YxxOrder order = orderMapper.selectByPrimaryKey(orderId);
        // Only manual-distribution orders may be assigned here.
        if (order.getOrderType() != OrderType.MANUAL_DISTRIBUTE.val()) {
            return 0;
        }
        // The order must still be in the DISTRIBUTING state.
        if (DISTRIBUTING.val() != order.getOrderStatus()) {
            return 0;
        }
        // Reject when the worker's daily quota would be exceeded.
        boolean result = check(order, workerId);
        if (!result) {
            return 0;
        }
        this.updateWorkerOrderCount(workerId, order);
        orderStatusRecordMapper.insert(YxxOrderStatusRecord.builder()
                .createTime(new Date()).remark("管理员手动指派订单").orderId(orderId)
                .originStatus(DISTRIBUTING.val()).currentStatus(RECEIVED.val())
                .build());
        // Advance the order to RECEIVED and record the worker.
        return orderMapper.updateByPrimaryKeySelective(YxxOrder.builder()
                .id(orderId).updateTime(new Date()).orderStatus(RECEIVED.val()).workerId(workerId)
                .build());
    }
}
Aodacat6/my-shop-server
shop-server-api/src/main/java/com/onlythinking/shop/core/constant/DicConstant.java
<gh_stars>10-100
package com.onlythinking.shop.core.constant;

import com.google.common.collect.Sets;

import java.util.Set;

/**
 * <p> Dictionary constants: type codes and the value codes grouped under
 * each type. </p>
 *
 * @author <NAME>
 */
public interface DicConstant {

    // Yes / no flag
    String TYPE_1000 = "1000";
    String VAL_1000_00 = "100000"; // Yes
    String VAL_1000_01 = "100001"; // No

    // Gender
    String TYPE_1001 = "1001";
    String VAL_1001_00 = "100100"; // Unknown
    String VAL_1001_01 = "100101"; // Male
    String VAL_1001_02 = "100102"; // Female

    // Job trigger type
    String TYPE_1002 = "1002";
    String VAL_1002_00 = "100200"; // Fixed rate: SimpleTrigger|CalendarIntervalTrigger
    String VAL_1002_01 = "100201"; // Daily time window: DailyTimeIntervalTrigger
    String VAL_1002_02 = "100202"; // CRON expression: CronTrigger

    // Job instance status
    String TYPE_1003 = "1003";
    String VAL_1003_00 = "100300"; // Running
    String VAL_1003_01 = "100301"; // Paused
    String VAL_1003_02 = "100302"; // Failed

    // Job run result
    String TYPE_1004 = "1004";
    String VAL_1004_00 = "100400"; // Success
    String VAL_1004_01 = "100401"; // Failure

    String TYPE_1005 = "1005"; // User login mode
    String VAL_1005_00 = "100500"; // WeChat
    String VAL_1005_01 = "100501"; // Username

    // Calendar-based interval units (valid for CalendarIntervalTrigger)
    Set<String> CALENDAR_INTERVAL = Sets.newHashSet(
      "DAY",
      "WEEK",
      "MONTH",
      "YEAR"
    );

    // All supported interval units
    Set<String> ALL_INTERVAL = Sets.newHashSet(
      "MILLISECOND",
      "SECOND",
      "MINUTE",
      "HOUR",
      "DAY",
      "WEEK",
      "MONTH",
      "YEAR"
    );
}
allansrc/fuchsia
src/graphics/lib/magma/src/magma_util/platform/linux/linux_platform_port.cc
<gh_stars>100-1000
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "magma_util/macros.h"

#include "platform_port.h"

namespace magma {

// Stub PlatformPort for Linux: ports are not implemented on this platform.
// Note: DRET/DRETP come from magma_util/macros.h — presumably they log the
// error and return the value; confirm against macros.h.
class LinuxPlatformPort : public PlatformPort {
 public:
  // No resources are held by the stub, so closing is a no-op.
  void Close() override {}

  // Always fails with MAGMA_STATUS_UNIMPLEMENTED; never produces a key.
  Status Wait(uint64_t* key_out, uint64_t timeout_ms) override { return DRET(MAGMA_STATUS_UNIMPLEMENTED); }
};

// Port creation is unsupported on Linux; always returns nullptr.
std::unique_ptr<PlatformPort> PlatformPort::Create() {
  return DRETP(nullptr, "PlatformPort::Create not supported");
}

}  // namespace magma
lectureStudio/avdev
avdev-jni/src/main/cpp/dependencies/windows/mf/src/MFInitializer.cpp
<gh_stars>1-10
/*
 * Copyright 2016 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "MFInitializer.h"
#include "WindowsHelper.h"

namespace avdev
{
	// Starts Media Foundation in the constructor and shuts it down in the
	// destructor, so MF lifetime is tied to this object's scope.
	MFInitializer::MFInitializer() :
		initialized(false)
	{
		// MFSTARTUP_LITE skips loading the sockets library.
		HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_LITE);
		// THROW_IF_FAILED raises before 'initialized' is set, so the
		// destructor will not call MFShutdown after a failed startup.
		THROW_IF_FAILED(hr, "Initialize Media Foundation failed.");

		initialized = true;
	}

	MFInitializer::~MFInitializer()
	{
		// Only balance a successful MFStartup with MFShutdown.
		if (initialized) {
			MFShutdown();
		}
	}

	// Reports whether MFStartup completed successfully.
	bool MFInitializer::isInitialized()
	{
		return initialized;
	}
}
mjohns-databricks/rasterframes
core/src/test/scala/org/locationtech/rasterframes/MetadataSpec.scala
/*
 * This software is licensed under the Apache 2 license, quoted below.
 *
 * Copyright 2017 Astraea, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     [http://www.apache.org/licenses/LICENSE-2.0]
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 */

package org.locationtech.rasterframes

import org.apache.spark.sql.types.MetadataBuilder

/**
 * Test rig for column metadata management.
 *
 * @since 9/6/17
 */
class MetadataSpec extends TestEnvironment with TestData {
  import spark.implicits._

  // Fixed metadata payload attached to columns in both tests below.
  private val sampleMetadata = new MetadataBuilder().putBoolean("haz", true).putLong("baz", 42).build()

  describe("Metadata storage") {
    it("should serialize and attach metadata") {
      //val rf = sampleGeoTiff.projectedRaster.toLayer(128, 128)
      val df = spark.createDataset(Seq((1, "one"), (2, "two"), (3, "three"))).toDF("num", "str")
      // Attach the metadata to the 'num' column's attribute...
      val withmeta = df.mapColumnAttribute($"num", attr => {
        attr.withMetadata(sampleMetadata)
      })

      // ...and verify it round-trips unchanged through the plan.
      val meta2 = withmeta.fetchMetadataValue($"num", _.metadata)
      assert(Some(sampleMetadata) === meta2)
    }

    it("should handle post-join duplicate column names") {
      // After the join, both sides contribute a 'str' column; metadata is
      // attached via the df1-qualified reference and fetched by plain name.
      val df1 = spark.createDataset(Seq((1, "one"), (2, "two"), (3, "three"))).toDF("num", "str")
      val df2 = spark.createDataset(Seq((1, "a"), (2, "b"), (3, "c"))).toDF("num", "str")

      val joined = df1.as("a").join(df2.as("b"), "num")

      val withmeta = joined.mapColumnAttribute(df1("str"), attr => {
        attr.withMetadata(sampleMetadata)
      })

      val meta2 = withmeta.fetchMetadataValue($"str", _.metadata)
      assert(Some(sampleMetadata) === meta2)
    }
  }
}
Jorble/KnowledgeBase
app/src/main/java/com/giant/knowledgebase/mvp/model/entity/SearchHistoryBean.java
package com.giant.knowledgebase.mvp.model.entity;

import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.NotNull;
import org.greenrobot.greendao.annotation.Generated;
import org.greenrobot.greendao.annotation.Unique;

/**
 * greenDAO entity for one saved search query.  The {@code @Generated}
 * constructors and accessors are produced by the greenDAO plugin — the
 * hash values are checked by the generator, so do not hand-edit them.
 *
 * Created by Jorble on 2017/6/22.
 */
@Entity
public class SearchHistoryBean {
    // Auto-increment primary key, assigned by greenDAO on insert.
    @Id(autoincrement = true)
    private Long id;

    // The search text entered by the user; must not be null.
    @NotNull
    private String history;

    @Generated(hash = 1724186286)
    public SearchHistoryBean(Long id, @NotNull String history) {
        this.id = id;
        this.history = history;
    }

    @Generated(hash = 1570282321)
    public SearchHistoryBean() {
    }

    public Long getId() {
        return this.id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getHistory() {
        return this.history;
    }

    public void setHistory(String history) {
        this.history = history;
    }
}
pbaris/jmix
jmix-search/search/src/test/java/test_support/entity/TestSimpleRootEntity.java
/*
 * Copyright 2021 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package test_support.entity;

import io.jmix.core.metamodel.annotation.InstanceName;
import io.jmix.core.metamodel.annotation.JmixEntity;

import javax.persistence.*;
import java.util.Date;

/**
 * Simple Jmix test entity with one attribute per basic value kind
 * (string, integer, date) used by the search test suite.
 */
@JmixEntity
@Entity(name = "test_SimpleRootEntity")
@Table(name = "TEST_SIMPLE_ROOT_ENTITY")
public class TestSimpleRootEntity extends BaseEntity {

    // Display name of the entity instance.
    @InstanceName
    @Column(name = "NAME")
    private String name;

    @Column(name = "FIRST_TEXT_VALUE")
    private String firstTextValue;

    @Column(name = "SECOND_TEXT_VALUE")
    private String secondTextValue;

    @Column(name = "INT_VALUE")
    private Integer intValue;

    // Stored as a DATE (no time component).
    @Temporal(TemporalType.DATE)
    @Column(name = "DATE_VALUE")
    private Date dateValue;

    public Date getDateValue() {
        return dateValue;
    }

    public void setDateValue(Date dateValue) {
        this.dateValue = dateValue;
    }

    public Integer getIntValue() {
        return intValue;
    }

    public void setIntValue(Integer intValue) {
        this.intValue = intValue;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getFirstTextValue() {
        return firstTextValue;
    }

    public void setFirstTextValue(String firstTextValue) {
        this.firstTextValue = firstTextValue;
    }

    public String getSecondTextValue() {
        return secondTextValue;
    }

    public void setSecondTextValue(String secondTextValue) {
        this.secondTextValue = secondTextValue;
    }
}
bauen1/myunix
kernel/arch/i686/memcpy.c
#include <assert.h> #include <stdint.h> #include <memcpy.h> void *memcpy(void *dest, const void *src, size_t len) { assert(dest != NULL); assert(src != NULL); assert(len != 0); __asm__ __volatile__ ( "rep movsb" : "=D" (dest), "=S" (src), "=c" (len) : "0" (dest), "1" (src), "2" (len) : "memory"); return dest; }
Vasyka/koku
koku/reporting/migrations/0123_auto_20200727_2302.py
# Generated by Django 2.2.14 on 2020-07-27 23:02 import django.db.models.deletion from django.db import migrations from django.db import models class Migration(migrations.Migration): dependencies = [("reporting", "0122_auto_20200803_2307")] operations = [ migrations.DeleteModel(name="OCPStorageVolumeClaimLabelSummary"), migrations.AddIndex( model_name="awscostentrylineitemdaily", index=models.Index(fields=["resource_id"], name="resource_id_idx") ), ]
bpluly/crichton
crichtonweb/requirement/migrations/0001_initial.py
<filename>crichtonweb/requirement/migrations/0001_initial.py<gh_stars>0 # Crichton, Admirable Source Configuration Management # Copyright 2012 British Broadcasting Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # # encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'PackageSpecificationAuditLogEntry' db.create_table('requirement_packagespecificationauditlogentry', ( ('id', self.gf('django.db.models.fields.IntegerField')(db_index=True, blank=True)), ('package', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['package.PackageName'])), ('version_specification', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['requirement.VersionSpecification'], null=True, blank=True)), ('deleted', self.gf('django.db.models.fields.BooleanField')(default=False)), ('action_id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('action_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)), ('action_user', self.gf('audit_log.models.fields.LastUserField')(related_name='_packagespecification_audit_log_entry')), ('action_type', self.gf('django.db.models.fields.CharField')(max_length=1)), )) db.send_create_signal('requirement', ['PackageSpecificationAuditLogEntry']) # Adding model 'PackageSpecification' db.create_table('requirement_packagespecification', ( ('id', 
self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('package', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['package.PackageName'])), ('version_specification', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['requirement.VersionSpecification'], null=True, blank=True)), ('deleted', self.gf('django.db.models.fields.BooleanField')(default=False)), )) db.send_create_signal('requirement', ['PackageSpecification']) # Adding unique constraint on 'PackageSpecification', fields ['package', 'version_specification'] db.create_unique('requirement_packagespecification', ['package_id', 'version_specification_id']) # Adding model 'VersionRange' db.create_table('requirement_versionrange', ( ('name', self.gf('django.db.models.fields.CharField')(max_length=103, primary_key=True)), ('minimum', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['package.Version'])), ('minimum_is_inclusive', self.gf('django.db.models.fields.BooleanField')(default=True)), ('maximum', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['package.Version'])), ('maximum_is_inclusive', self.gf('django.db.models.fields.BooleanField')(default=False)), )) db.send_create_signal('requirement', ['VersionRange']) # Adding model 'VersionSpecification' db.create_table('requirement_versionspecification', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('version', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['package.Version'])), ('version_range', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='+', null=True, to=orm['requirement.VersionRange'])), )) db.send_create_signal('requirement', ['VersionSpecification']) # Adding unique constraint on 'VersionSpecification', fields ['version', 'version_range'] db.create_unique('requirement_versionspecification', 
['version_id', 'version_range_id']) # Adding model 'RequirementAuditLogEntry' db.create_table('requirement_requirementauditlogentry', ( ('id', self.gf('django.db.models.fields.IntegerField')(db_index=True, blank=True)), ('application', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['prodmgmt.Application'])), ('default_specification', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='_auditlog_default_specifications', null=True, to=orm['requirement.PackageSpecification'])), ('deleted', self.gf('django.db.models.fields.BooleanField')(default=False)), ('action_id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('action_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)), ('action_user', self.gf('audit_log.models.fields.LastUserField')(related_name='_requirement_audit_log_entry')), ('action_type', self.gf('django.db.models.fields.CharField')(max_length=1)), )) db.send_create_signal('requirement', ['RequirementAuditLogEntry']) # Adding model 'Requirement' db.create_table('requirement_requirement', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('application', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['prodmgmt.Application'], unique=True)), ('default_specification', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='default_specifications', null=True, to=orm['requirement.PackageSpecification'])), ('deleted', self.gf('django.db.models.fields.BooleanField')(default=False)), )) db.send_create_signal('requirement', ['Requirement']) # Adding model 'EnvironmentRequirementAuditLogEntry' db.create_table('requirement_environmentrequirementauditlogentry', ( ('id', self.gf('django.db.models.fields.IntegerField')(db_index=True, blank=True)), ('specification', self.gf('django.db.models.fields.related.ForeignKey')(related_name='_auditlog_environment_requirements', to=orm['requirement.PackageSpecification'])), 
('environment', self.gf('django.db.models.fields.related.ForeignKey')(related_name='_auditlog_requirements', to=orm['system.Environment'])), ('requirement', self.gf('django.db.models.fields.related.ForeignKey')(related_name='_auditlog_environment_specifications', to=orm['requirement.Requirement'])), ('deleted', self.gf('django.db.models.fields.BooleanField')(default=False)), ('action_id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('action_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)), ('action_user', self.gf('audit_log.models.fields.LastUserField')(related_name='_environmentrequirement_audit_log_entry')), ('action_type', self.gf('django.db.models.fields.CharField')(max_length=1)), )) db.send_create_signal('requirement', ['EnvironmentRequirementAuditLogEntry']) # Adding model 'EnvironmentRequirement' db.create_table('requirement_environmentrequirement', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('specification', self.gf('django.db.models.fields.related.ForeignKey')(related_name='environment_requirements', to=orm['requirement.PackageSpecification'])), ('environment', self.gf('django.db.models.fields.related.ForeignKey')(related_name='requirements', to=orm['system.Environment'])), ('requirement', self.gf('django.db.models.fields.related.ForeignKey')(related_name='environment_specifications', to=orm['requirement.Requirement'])), ('deleted', self.gf('django.db.models.fields.BooleanField')(default=False)), )) db.send_create_signal('requirement', ['EnvironmentRequirement']) # Adding unique constraint on 'EnvironmentRequirement', fields ['specification', 'environment', 'requirement'] db.create_unique('requirement_environmentrequirement', ['specification_id', 'environment_id', 'requirement_id']) def backwards(self, orm): # Removing unique constraint on 'EnvironmentRequirement', fields ['specification', 'environment', 'requirement'] 
db.delete_unique('requirement_environmentrequirement', ['specification_id', 'environment_id', 'requirement_id']) # Removing unique constraint on 'VersionSpecification', fields ['version', 'version_range'] db.delete_unique('requirement_versionspecification', ['version_id', 'version_range_id']) # Removing unique constraint on 'PackageSpecification', fields ['package', 'version_specification'] db.delete_unique('requirement_packagespecification', ['package_id', 'version_specification_id']) # Deleting model 'PackageSpecificationAuditLogEntry' db.delete_table('requirement_packagespecificationauditlogentry') # Deleting model 'PackageSpecification' db.delete_table('requirement_packagespecification') # Deleting model 'VersionRange' db.delete_table('requirement_versionrange') # Deleting model 'VersionSpecification' db.delete_table('requirement_versionspecification') # Deleting model 'RequirementAuditLogEntry' db.delete_table('requirement_requirementauditlogentry') # Deleting model 'Requirement' db.delete_table('requirement_requirement') # Deleting model 'EnvironmentRequirementAuditLogEntry' db.delete_table('requirement_environmentrequirementauditlogentry') # Deleting model 'EnvironmentRequirement' db.delete_table('requirement_environmentrequirement') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': 
"orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'issue.issue': { 
'Meta': {'ordering': "('name',)", 'unique_together': "(('name', 'project'),)", 'object_name': 'Issue'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'issues'", 'to': "orm['issue.IssueTrackerProject']"}) }, 'issue.issuetracker': { 'Meta': {'ordering': "('name',)", 'object_name': 'IssueTracker'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'issue_url_pattern': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}), 'name': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}), 'tracker_type': ('django.db.models.fields.CharField', [], {'default': "'jira'", 'max_length': '12'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}) }, 'issue.issuetrackerproject': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('name', 'issue_tracker'),)", 'object_name': 'IssueTrackerProject'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'issue_tracker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects'", 'to': "orm['issue.IssueTracker']"}), 'name': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}) }, 'package.packagename': { 'Meta': {'ordering': "('name',)", 'object_name': 'PackageName'}, 'name': 
('django.db.models.fields.CharField', [], {'max_length': '255', 'primary_key': 'True'}) }, 'package.version': { 'Meta': {'ordering': "('major', 'minor', 'micro', 'revision')", 'object_name': 'Version'}, 'build': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'major': ('django.db.models.fields.IntegerField', [], {}), 'micro': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'minor': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}), 'revision': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'rpm_release': ('django.db.models.fields.CharField', [], {'max_length': '48', 'null': 'True', 'blank': 'True'}), 'rpm_version': ('django.db.models.fields.CharField', [], {'max_length': '48', 'null': 'True', 'blank': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'release'", 'max_length': '16'}) }, 'prodmgmt.application': { 'Meta': {'ordering': "('name',)", 'object_name': 'Application'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}), 'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['prodmgmt.Product']"}) }, 'prodmgmt.person': { 'Meta': {'ordering': "('username',)", 'object_name': 'Person'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'distinguished_name': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': 
('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'prodmgmt.product': { 'Meta': {'ordering': "('name',)", 'object_name': 'Product'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owned_products'", 'to': "orm['prodmgmt.Person']"}), 'pipeline_issue': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['issue.Issue']"}) }, 'requirement.environmentrequirement': { 'Meta': {'unique_together': "(('specification', 'environment', 'requirement'),)", 'object_name': 'EnvironmentRequirement'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'requirements'", 'to': "orm['system.Environment']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'requirement': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'environment_specifications'", 'to': "orm['requirement.Requirement']"}), 'specification': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'environment_requirements'", 'to': "orm['requirement.PackageSpecification']"}) }, 'requirement.environmentrequirementauditlogentry': { 'Meta': {'ordering': "('-action_date',)", 'object_name': 
'EnvironmentRequirementAuditLogEntry'}, 'action_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'action_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'action_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}), 'action_user': ('audit_log.models.fields.LastUserField', [], {'related_name': "'_environmentrequirement_audit_log_entry'"}), 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_auditlog_requirements'", 'to': "orm['system.Environment']"}), 'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}), 'requirement': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_auditlog_environment_specifications'", 'to': "orm['requirement.Requirement']"}), 'specification': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_auditlog_environment_requirements'", 'to': "orm['requirement.PackageSpecification']"}) }, 'requirement.packagespecification': { 'Meta': {'unique_together': "(('package', 'version_specification'),)", 'object_name': 'PackageSpecification'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.PackageName']"}), 'version_specification': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['requirement.VersionSpecification']", 'null': 'True', 'blank': 'True'}) }, 'requirement.packagespecificationauditlogentry': { 'Meta': {'ordering': "('-action_date',)", 'object_name': 'PackageSpecificationAuditLogEntry'}, 'action_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'action_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'action_type': 
('django.db.models.fields.CharField', [], {'max_length': '1'}), 'action_user': ('audit_log.models.fields.LastUserField', [], {'related_name': "'_packagespecification_audit_log_entry'"}), 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}), 'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.PackageName']"}), 'version_specification': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['requirement.VersionSpecification']", 'null': 'True', 'blank': 'True'}) }, 'requirement.requirement': { 'Meta': {'object_name': 'Requirement'}, 'application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['prodmgmt.Application']", 'unique': 'True'}), 'default_specification': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'default_specifications'", 'null': 'True', 'to': "orm['requirement.PackageSpecification']"}), 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'requirement.requirementauditlogentry': { 'Meta': {'ordering': "('-action_date',)", 'object_name': 'RequirementAuditLogEntry'}, 'action_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'action_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'action_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}), 'action_user': ('audit_log.models.fields.LastUserField', [], {'related_name': "'_requirement_audit_log_entry'"}), 'application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['prodmgmt.Application']"}), 'default_specification': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'_auditlog_default_specifications'", 'null': 'True', 'to': "orm['requirement.PackageSpecification']"}), 
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}) }, 'requirement.versionrange': { 'Meta': {'object_name': 'VersionRange'}, 'maximum': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['package.Version']"}), 'maximum_is_inclusive': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'minimum': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['package.Version']"}), 'minimum_is_inclusive': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '103', 'primary_key': 'True'}) }, 'requirement.versionspecification': { 'Meta': {'unique_together': "(('version', 'version_range'),)", 'object_name': 'VersionSpecification'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'version': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['package.Version']"}), 'version_range': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['requirement.VersionRange']"}) }, 'system.environment': { 'Meta': {'ordering': "('name',)", 'object_name': 'Environment'}, 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}) } } complete_apps = ['requirement']
mglantz/insights-core
insights/parsers/rabbitmq_log.py
""" RabbitMQ Logs ============= Module for parsing the log files for RabbitMQ: RabbitMQLogs - file ``/var/log/rabbitmq/rabbit@$HOSTNAME.log`` -------------------------------------------------------------- RabbitMQStartupErrLog - file ``/var/log/rabbitmq/startup_err`` -------------------------------------------------------------- RabbitMQStartupLog - file ``/var/log/rabbitmq/startup_log`` ----------------------------------------------------------- """ from .. import LogFileOutput, parser from insights.specs import Specs @parser(Specs.rabbitmq_startup_log) class RabbitMQStartupLog(LogFileOutput): """Class for parsing ``/var/log/rabbitmq/startup_log`` file. Typical content of ``startup_log`` file is:: Starting all nodes... Starting node rabbit@ubuntu... +---+ +---+ | | | | | | | | | | | | | +---+ +-------+ | | | RabbitMQ +---+ | | | | | | v1.8.0 +---+ | | | +-------------------+ AMQP 8-0 Copyright (C) 2007-2010 LShift Ltd., Cohesive Financial Technologies LLC., and Rabbit Technologies Ltd. Licensed under the MPL. See http://www.rabbitmq.com/ node : rabbit@ubuntu app descriptor : /usr/lib/rabbitmq/lib/rabbitmq_server-1.8.0/sbin/../ebin/rabbit.app home dir : /var/lib/rabbitmq cookie hash : mfoMkOc9CYok/SmH7RH9Jg== log : /var/log/rabbitmq/rabbit@ubuntu.log sasl log : /var/log/rabbitmq/rabbit@ubuntu-sasl.log database dir : /var/lib/rabbitmq/mnesia/rabbit@ubuntu erlang version : 5.7.4 starting file handle cache server ...done starting worker pool ...done starting database ...done starting empty DB check ...done starting exchange recovery ...done starting queue supervisor and queue recovery ...BOOT ERROR: FAILED Note: Please refer to its super-class ``LogFileOutput`` """ pass @parser(Specs.rabbitmq_startup_err) class RabbitMQStartupErrLog(LogFileOutput): """Class for parsing ``/var/log/rabbitmq/startup_err`` file. 
Typical content of ``startup_err`` file is:: Error: {node_start_failed,normal} Crash dump was written to: erl_crash.dump Kernel pid terminated (application_controller) ({application_start_failure,kernel,{shutdown,{kernel,start,[normal,[]]}}}) .. note:: Please refer to its super-class :py:class:`insights.core.LogFileOutput` """ pass @parser(Specs.rabbitmq_logs) class RabbitMQLogs(LogFileOutput): """Class for parsing ``/var/log/rabbitmq/rabbit@$HOSTNAME.log`` file Typical content of ``rabbit@$HOSTNAME.log`` file is:: =INFO REPORT==== 9-Nov-2016::14:29:11 === Starting RabbitMQ 3.6.3 on Erlang 18.3.4.4 Copyright (C) 2007-2016 Pivotal Software, Inc. Licensed under the MPL. See http://www.rabbitmq.com/ =INFO REPORT==== 9-Nov-2016::14:29:11 === node : rabbit@overcloud-controller-0 home dir : /var/lib/rabbitmq config file(s) : /etc/rabbitmq/rabbitmq.config cookie hash : F7g8XhNTzvEK3KywLHh9yA== log : /var/log/rabbitmq/rabbit@overcloud-controller-0.log sasl log : /var/log/rabbitmq/rabbit@overcloud-controller-0-sasl.log database dir : /var/lib/rabbitmq/mnesia/rabbit@overcloud-controller-0 ... .. note:: Please refer to its super-class :py:class:`insights.core.LogFileOutput` for full usage. .. note:: Because this parser is defined using a PatternSpec, which returns multiple files, the data in the shared parser state is a list of these parser objects. This means that for the moment you will have to iterate across these objects directly. Examples: >>> for log in shared[RabbitMQLogs]: ... print 'log file:', log.file_path ... print 'INFO lines:', len(log.get('INFO REPORT')) ... print 'ERROR lines:', len(log.get('ERROR REPORT')) ... log file: /var/log/rabbitmq/rabbit@queue.example.com.log INFO lines: 2 ERROR lines: 0 """ time_format = '%d-%b-%Y::%H:%M:%S'
djsauble/puppet-dashboard
vendor/plugins/delayed_job/spec/backend/shared_backend_spec.rb
<reponame>djsauble/puppet-dashboard shared_examples_for 'a backend' do def create_job(opts = {}) @backend.create(opts.merge(:payload_object => SimpleJob.new)) end before do Delayed::Worker.max_priority = nil Delayed::Worker.min_priority = nil Delayed::Worker.default_priority = 99 SimpleJob.runs = 0 end it "should set run_at automatically if not set" do @backend.create(:payload_object => ErrorJob.new ).run_at.should_not be_nil end it "should not set run_at automatically if already set" do later = @backend.db_time_now + 5.minutes @backend.create(:payload_object => ErrorJob.new, :run_at => later).run_at.should be_close(later, 1) end it "should raise ArgumentError when handler doesn't respond_to :perform" do lambda { @backend.enqueue(Object.new) }.should raise_error(ArgumentError) end it "should increase count after enqueuing items" do @backend.enqueue SimpleJob.new @backend.count.should == 1 end it "should be able to set priority when enqueuing items" do @job = @backend.enqueue SimpleJob.new, 5 @job.priority.should == 5 end it "should use default priority when it is not set" do @job = @backend.enqueue SimpleJob.new @job.priority.should == 99 end it "should be able to set run_at when enqueuing items" do later = @backend.db_time_now + 5.minutes @job = @backend.enqueue SimpleJob.new, 5, later @job.run_at.should be_close(later, 1) end it "should work with jobs in modules" do M::ModuleJob.runs = 0 job = @backend.enqueue M::ModuleJob.new lambda { job.invoke_job }.should change { M::ModuleJob.runs }.from(0).to(1) end it "should raise an DeserializationError when the job class is totally unknown" do job = @backend.new :handler => "--- !ruby/object:JobThatDoesNotExist {}" lambda { job.payload_object.perform }.should raise_error(Delayed::DeserializationError) end it "should raise an DeserializationError when the job is badly encoded" do job = @backend.new :handler => "--- !ruby/object:SimpleJob {" lambda { job.payload_object.perform }.should 
raise_error(Delayed::DeserializationError) end it "should try to load the class when it is unknown at the time of the deserialization" do job = @backend.new :handler => "--- !ruby/object:JobThatDoesNotExist {}" job.should_receive(:attempt_to_load).with('JobThatDoesNotExist').and_return(true) lambda { job.payload_object.perform }.should raise_error(Delayed::DeserializationError) end it "should try include the namespace when loading unknown objects" do job = @backend.new :handler => "--- !ruby/object:Delayed::JobThatDoesNotExist {}" job.should_receive(:attempt_to_load).with('Delayed::JobThatDoesNotExist').and_return(true) lambda { job.payload_object.perform }.should raise_error(Delayed::DeserializationError) end it "should also try to load structs when they are unknown (raises TypeError)" do job = @backend.new :handler => "--- !ruby/struct:JobThatDoesNotExist {}" job.should_receive(:attempt_to_load).with('JobThatDoesNotExist').and_return(true) lambda { job.payload_object.perform }.should raise_error(Delayed::DeserializationError) end it "should try include the namespace when loading unknown structs" do job = @backend.new :handler => "--- !ruby/struct:Delayed::JobThatDoesNotExist {}" job.should_receive(:attempt_to_load).with('Delayed::JobThatDoesNotExist').and_return(true) lambda { job.payload_object.perform }.should raise_error(Delayed::DeserializationError) end describe "find_available" do it "should not find failed jobs" do @job = create_job :attempts => 50, :failed_at => @backend.db_time_now @backend.find_available('worker', 5, 1.second).should_not include(@job) end it "should not find jobs scheduled for the future" do @job = create_job :run_at => (@backend.db_time_now + 1.minute) @backend.find_available('worker', 5, 4.hours).should_not include(@job) end it "should not find jobs locked by another worker" do @job = create_job(:locked_by => 'other_worker', :locked_at => @backend.db_time_now - 1.minute) @backend.find_available('worker', 5, 4.hours).should_not 
include(@job) end it "should find open jobs" do @job = create_job @backend.find_available('worker', 5, 4.hours).should include(@job) end it "should find expired jobs" do @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now - 2.minutes) @backend.find_available('worker', 5, 1.minute).should include(@job) end it "should find own jobs" do @job = create_job(:locked_by => 'worker', :locked_at => (@backend.db_time_now - 1.minutes)) @backend.find_available('worker', 5, 4.hours).should include(@job) end it "should find only the right amount of jobs" do 10.times { create_job } @backend.find_available('worker', 7, 4.hours).should have(7).jobs end end context "when another worker is already performing an task, it" do before :each do @job = @backend.create :payload_object => SimpleJob.new, :locked_by => 'worker1', :locked_at => @backend.db_time_now - 5.minutes end it "should not allow a second worker to get exclusive access" do @job.lock_exclusively!(4.hours, 'worker2').should == false end it "should allow a second worker to get exclusive access if the timeout has passed" do @job.lock_exclusively!(1.minute, 'worker2').should == true end it "should be able to get access to the task if it was started more then max_age ago" do @job.locked_at = 5.hours.ago @job.save @job.lock_exclusively! 
4.hours, 'worker2' @job.reload @job.locked_by.should == 'worker2' @job.locked_at.should > 1.minute.ago end it "should not be found by another worker" do @backend.find_available('worker2', 1, 6.minutes).length.should == 0 end it "should be found by another worker if the time has expired" do @backend.find_available('worker2', 1, 4.minutes).length.should == 1 end it "should be able to get exclusive access again when the worker name is the same" do @job.lock_exclusively!(5.minutes, 'worker1').should be_true @job.lock_exclusively!(5.minutes, 'worker1').should be_true @job.lock_exclusively!(5.minutes, 'worker1').should be_true end end context "when another worker has worked on a task since the job was found to be available, it" do before :each do @job = @backend.create :payload_object => SimpleJob.new @job_copy_for_worker_2 = @backend.find(@job.id) end it "should not allow a second worker to get exclusive access if already successfully processed by worker1" do @job.destroy @job_copy_for_worker_2.lock_exclusively!(4.hours, 'worker2').should == false end it "should not allow a second worker to get exclusive access if failed to be processed by worker1 and run_at time is now in future (due to backing off behaviour)" do @job.update_attributes(:attempts => 1, :run_at => 1.day.from_now) @job_copy_for_worker_2.lock_exclusively!(4.hours, 'worker2').should == false end end describe "reserve" do before do Delayed::Worker.max_run_time = 2.minutes @worker = Delayed::Worker.new(:quiet => true) end it "should not reserve failed jobs" do create_job :attempts => 50, :failed_at => described_class.db_time_now described_class.reserve(@worker).should be_nil end it "should not reserve jobs scheduled for the future" do create_job :run_at => (described_class.db_time_now + 1.minute) described_class.reserve(@worker).should be_nil end it "should lock the job so other workers can't reserve it" do job = create_job described_class.reserve(@worker).should == job new_worker = Delayed::Worker.new(:quiet 
=> true) new_worker.name = 'worker2' described_class.reserve(new_worker).should be_nil end it "should reserve open jobs" do job = create_job described_class.reserve(@worker).should == job end it "should reserve expired jobs" do job = create_job(:locked_by => @worker.name, :locked_at => described_class.db_time_now - 3.minutes) described_class.reserve(@worker).should == job end it "should reserve own jobs" do job = create_job(:locked_by => @worker.name, :locked_at => (described_class.db_time_now - 1.minutes)) described_class.reserve(@worker).should == job end end context "#name" do it "should be the class name of the job that was enqueued" do @backend.create(:payload_object => ErrorJob.new ).name.should == 'ErrorJob' end it "should be the method that will be called if its a performable method object" do @job = Story.delay.create @job.name.should == "Story.create" end it "should be the instance method that will be called if its a performable method object" do @job = Story.create(:text => "...").delay.save @job.name.should == 'Story#save' end end context "worker prioritization" do before(:each) do Delayed::Worker.max_priority = nil Delayed::Worker.min_priority = nil end it "should fetch jobs ordered by priority" do 10.times { @backend.enqueue SimpleJob.new, rand(10) } jobs = @backend.find_available('worker', 10) jobs.size.should == 10 jobs.each_cons(2) do |a, b| a.priority.should <= b.priority end end it "should only find jobs greater than or equal to min priority" do min = 5 Delayed::Worker.min_priority = min 10.times {|i| @backend.enqueue SimpleJob.new, i } jobs = @backend.find_available('worker', 10) jobs.each {|job| job.priority.should >= min} end it "should only find jobs less than or equal to max priority" do max = 5 Delayed::Worker.max_priority = max 10.times {|i| @backend.enqueue SimpleJob.new, i } jobs = @backend.find_available('worker', 10) jobs.each {|job| job.priority.should <= max} end end context "clear_locks!" 
do before do @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now) end it "should clear locks for the given worker" do @backend.clear_locks!('worker') @backend.find_available('worker2', 5, 1.minute).should include(@job) end it "should not clear locks for other workers" do @backend.clear_locks!('worker1') @backend.find_available('worker1', 5, 1.minute).should_not include(@job) end end context "unlock" do before do @job = create_job(:locked_by => 'worker', :locked_at => @backend.db_time_now) end it "should clear locks" do @job.unlock @job.locked_by.should be_nil @job.locked_at.should be_nil end end context "large handler" do before do text = "Lorem ipsum dolor sit amet. " * 1000 @job = @backend.enqueue Delayed::PerformableMethod.new(text, :length, {}) end it "should have an id" do @job.id.should_not be_nil end end context "max_attempts" do before(:each) do @job = described_class.enqueue SimpleJob.new end it 'should not be defined' do @job.max_attempts.should be_nil end it 'should use the max_retries value on the payload when defined' do @job.payload_object.stub!(:max_attempts).and_return(99) @job.max_attempts.should == 99 end end describe "worker integration" do before do @worker = Delayed::Worker.new(:max_priority => nil, :min_priority => nil, :quiet => true) end describe "running a job" do context "when the job raises a deserialization error" do it "should mark the job as failed" do Delayed::Worker.destroy_failed_jobs = false job = described_class.create! :handler => "--- !ruby/object:JobThatDoesNotExist {}" @worker.work_off job.reload job.failed_at.should_not be_nil end end end end end
tongueroo/codebuild
lib/cody/cli/stop.rb
class Cody::CLI class Stop < Base def run check_build_id! run_with_exception_handling do codebuild.stop_build(id: build_id) puts "Build has been stopped: #{build_id}" end end end end
PedrV/fcup-ed
Exercices/week10/testMax_MaxSum.java
package week10; class MaxImple { // Maximo usando tail recursion public static int maxRec1 (int[] arr, int start, int end) { int max_so_far = arr[start]; if (start == end) return arr[end]; return Math.max(max_so_far, maxRec1(arr, start+1, end)); } // Maximo usando dividir para conquistar public static int maxRec2 (int[] arr, int start, int end) { if (start == end) return arr[end]; int middle = (start + end) / 2; int max_esq = maxRec2(arr, start, middle); int max_dir = maxRec2(arr, middle+1, end); return (Math.max(max_esq, max_dir)); } // Sum array usando tail recursion public static int sumArray1(int[] arr, int start, int end) { if (start == end) return arr[end]; return (arr[start] + sumArray1(arr, start+1, end)); } // Sum array usando dividir para conquistar public static int sumArray2(int[] arr, int start, int end) { if (start == end) return arr[end]; int middle = (start + end) / 2; return (sumArray2(arr, start, middle) + sumArray2(arr, middle+1, end)); } } public class testMax_MaxSum { public static void main (String [] args) { int[] array = {1,-2,34,4,12,42}; System.out.println(MaxImple.sumArray1(array, 0, array.length-1)); System.out.println(MaxImple.sumArray2(array, 0, array.length-1)); } }
h0tbird/clusterawsadm
providers/terraform-provider-aws/aws/resource_aws_wafv2_ip_set_test.go
<filename>providers/terraform-provider-aws/aws/resource_aws_wafv2_ip_set_test.go package aws import ( "fmt" "log" "regexp" "testing" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/wafv2" "github.com/hashicorp/go-multierror" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/terraform-providers/terraform-provider-aws/aws/internal/service/wafv2/lister" ) func init() { resource.AddTestSweepers("aws_wafv2_ip_set", &resource.Sweeper{ Name: "aws_wafv2_ip_set", F: testSweepWafv2IpSets, Dependencies: []string{ "aws_wafv2_rule_group", "aws_wafv2_web_acl", }, }) } func testSweepWafv2IpSets(region string) error { client, err := sharedClientForRegion(region) if err != nil { return fmt.Errorf("error getting client: %s", err) } conn := client.(*AWSClient).wafv2conn var sweeperErrs *multierror.Error input := &wafv2.ListIPSetsInput{ Scope: aws.String(wafv2.ScopeRegional), } err = lister.ListIPSetsPages(conn, input, func(page *wafv2.ListIPSetsOutput, lastPage bool) bool { if page == nil { return !lastPage } for _, ipSet := range page.IPSets { id := aws.StringValue(ipSet.Id) r := resourceAwsWafv2IPSet() d := r.Data(nil) d.SetId(id) d.Set("lock_token", ipSet.LockToken) d.Set("name", ipSet.Name) d.Set("scope", input.Scope) err := r.Delete(d, client) if err != nil { sweeperErr := fmt.Errorf("error deleting WAFv2 IP Set (%s): %w", id, err) log.Printf("[ERROR] %s", sweeperErr) sweeperErrs = multierror.Append(sweeperErrs, sweeperErr) continue } } return !lastPage }) if testSweepSkipSweepError(err) { log.Printf("[WARN] Skipping WAFv2 IP Set sweep for %s: %s", region, err) return sweeperErrs.ErrorOrNil() // In case we have completed some pages, but had errors } if err != nil { sweeperErrs = multierror.Append(sweeperErrs, fmt.Errorf("error describing WAFv2 IP Sets: %w", err)) } return sweeperErrs.ErrorOrNil() } func 
TestAccAwsWafv2IPSet_basic(t *testing.T) { var v wafv2.IPSet ipSetName := fmt.Sprintf("ip-set-%s", acctest.RandString(5)) resourceName := "aws_wafv2_ip_set.ip_set" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSWafv2ScopeRegional(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSWafv2IPSetDestroy, Steps: []resource.TestStep{ { Config: testAccAwsWafv2IPSetConfig(ipSetName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &v), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "name", ipSetName), resource.TestCheckResourceAttr(resourceName, "description", ipSetName), resource.TestCheckResourceAttr(resourceName, "scope", wafv2.ScopeRegional), resource.TestCheckResourceAttr(resourceName, "ip_address_version", wafv2.IPAddressVersionIpv4), resource.TestCheckResourceAttr(resourceName, "addresses.#", "2"), resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), resource.TestCheckResourceAttr(resourceName, "tags.Tag1", "Value1"), resource.TestCheckResourceAttr(resourceName, "tags.Tag2", "Value2"), ), }, { Config: testAccAwsWafv2IPSetConfigUpdate(ipSetName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &v), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "name", ipSetName), resource.TestCheckResourceAttr(resourceName, "description", "Updated"), resource.TestCheckResourceAttr(resourceName, "scope", wafv2.ScopeRegional), resource.TestCheckResourceAttr(resourceName, "ip_address_version", wafv2.IPAddressVersionIpv4), resource.TestCheckResourceAttr(resourceName, "addresses.#", "3"), ), }, { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, ImportStateIdFunc: 
testAccAWSWafv2IPSetImportStateIdFunc(resourceName), }, }, }) } func TestAccAwsWafv2IPSet_Disappears(t *testing.T) { var r wafv2.IPSet ipSetName := fmt.Sprintf("ip-set-%s", acctest.RandString(5)) resourceName := "aws_wafv2_ip_set.ip_set" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSWafv2ScopeRegional(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSWafv2IPSetDestroy, Steps: []resource.TestStep{ { Config: testAccAwsWafv2IPSetConfig(ipSetName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &r), testAccCheckResourceDisappears(testAccProvider, resourceAwsWafv2IPSet(), resourceName), ), ExpectNonEmptyPlan: true, }, }, }) } func TestAccAwsWafv2IPSet_IPv6(t *testing.T) { var v wafv2.IPSet ipSetName := fmt.Sprintf("ip-set-%s", acctest.RandString(5)) resourceName := "aws_wafv2_ip_set.ip_set" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSWafv2ScopeRegional(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSWafv2IPSetDestroy, Steps: []resource.TestStep{ { Config: testAccAwsWafv2IPSetConfigIPv6(ipSetName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &v), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "name", ipSetName), resource.TestCheckResourceAttr(resourceName, "description", ipSetName), resource.TestCheckResourceAttr(resourceName, "scope", wafv2.ScopeRegional), resource.TestCheckResourceAttr(resourceName, "ip_address_version", wafv2.IPAddressVersionIpv6), resource.TestCheckResourceAttr(resourceName, "addresses.#", "3"), resource.TestCheckTypeSetElemAttr(resourceName, "addresses.*", "1234:5678:9abc:6811:0000:0000:0000:0000/64"), resource.TestCheckTypeSetElemAttr(resourceName, "addresses.*", "2001:db8::/32"), resource.TestCheckTypeSetElemAttr(resourceName, 
"addresses.*", "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128"), ), }, { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, ImportStateIdFunc: testAccAWSWafv2IPSetImportStateIdFunc(resourceName), }, }, }) } func TestAccAwsWafv2IPSet_Minimal(t *testing.T) { var v wafv2.IPSet ipSetName := fmt.Sprintf("ip-set-%s", acctest.RandString(5)) resourceName := "aws_wafv2_ip_set.ip_set" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSWafv2ScopeRegional(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSWafv2IPSetDestroy, Steps: []resource.TestStep{ { Config: testAccAwsWafv2IPSetConfigMinimal(ipSetName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &v), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "name", ipSetName), resource.TestCheckResourceAttr(resourceName, "description", ""), resource.TestCheckResourceAttr(resourceName, "scope", wafv2.ScopeRegional), resource.TestCheckResourceAttr(resourceName, "ip_address_version", wafv2.IPAddressVersionIpv4), resource.TestCheckResourceAttr(resourceName, "addresses.#", "0"), ), }, { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, ImportStateIdFunc: testAccAWSWafv2IPSetImportStateIdFunc(resourceName), }, }, }) } func TestAccAwsWafv2IPSet_ChangeNameForceNew(t *testing.T) { var before, after wafv2.IPSet ipSetName := fmt.Sprintf("ip-set-%s", acctest.RandString(5)) ipSetNewName := fmt.Sprintf("ip-set-%s", acctest.RandString(5)) resourceName := "aws_wafv2_ip_set.ip_set" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSWafv2ScopeRegional(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSWafv2IPSetDestroy, Steps: []resource.TestStep{ { Config: testAccAwsWafv2IPSetConfig(ipSetName), Check: resource.ComposeTestCheckFunc( 
testAccCheckAWSWafv2IPSetExists(resourceName, &before), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "name", ipSetName), resource.TestCheckResourceAttr(resourceName, "description", ipSetName), resource.TestCheckResourceAttr(resourceName, "scope", wafv2.ScopeRegional), resource.TestCheckResourceAttr(resourceName, "ip_address_version", wafv2.IPAddressVersionIpv4), resource.TestCheckResourceAttr(resourceName, "addresses.#", "2"), ), }, { Config: testAccAwsWafv2IPSetConfig(ipSetNewName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &after), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "name", ipSetNewName), resource.TestCheckResourceAttr(resourceName, "description", ipSetNewName), resource.TestCheckResourceAttr(resourceName, "scope", wafv2.ScopeRegional), resource.TestCheckResourceAttr(resourceName, "ip_address_version", wafv2.IPAddressVersionIpv4), resource.TestCheckResourceAttr(resourceName, "addresses.#", "2"), ), }, }, }) } func TestAccAwsWafv2IPSet_Tags(t *testing.T) { var v wafv2.IPSet ipSetName := fmt.Sprintf("ip-set-%s", acctest.RandString(5)) resourceName := "aws_wafv2_ip_set.ip_set" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSWafv2ScopeRegional(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSWafv2IPSetDestroy, Steps: []resource.TestStep{ { Config: testAccAwsWafv2IPSetConfigOneTag(ipSetName, "Tag1", "Value1"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &v), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "tags.%", "1"), resource.TestCheckResourceAttr(resourceName, "tags.Tag1", "Value1"), ), 
}, { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, ImportStateIdFunc: testAccAWSWafv2IPSetImportStateIdFunc(resourceName), }, { Config: testAccAwsWafv2IPSetConfigTwoTags(ipSetName, "Tag1", "Value1Updated", "Tag2", "Value2"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &v), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "tags.%", "2"), resource.TestCheckResourceAttr(resourceName, "tags.Tag1", "Value1Updated"), resource.TestCheckResourceAttr(resourceName, "tags.Tag2", "Value2"), ), }, { Config: testAccAwsWafv2IPSetConfigOneTag(ipSetName, "Tag2", "Value2"), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &v), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "tags.%", "1"), resource.TestCheckResourceAttr(resourceName, "tags.Tag2", "Value2"), ), }, }, }) } func TestAccAwsWafv2IPSet_Large(t *testing.T) { var v wafv2.IPSet ipSetName := fmt.Sprintf("ip-set-%s", acctest.RandString(5)) resourceName := "aws_wafv2_ip_set.ip_set" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSWafv2ScopeRegional(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSWafv2IPSetDestroy, Steps: []resource.TestStep{ { Config: testAccAwsWafv2IPSetConfigLarge(ipSetName), Check: resource.ComposeTestCheckFunc( testAccCheckAWSWafv2IPSetExists(resourceName, &v), testAccMatchResourceAttrRegionalARN(resourceName, "arn", "wafv2", regexp.MustCompile(`regional/ipset/.+$`)), resource.TestCheckResourceAttr(resourceName, "name", ipSetName), resource.TestCheckResourceAttr(resourceName, "description", ipSetName), resource.TestCheckResourceAttr(resourceName, "scope", wafv2.ScopeRegional), resource.TestCheckResourceAttr(resourceName, 
"ip_address_version", wafv2.IPAddressVersionIpv4), resource.TestCheckResourceAttr(resourceName, "addresses.#", "50"), ), }, { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, ImportStateIdFunc: testAccAWSWafv2IPSetImportStateIdFunc(resourceName), }, }, }) } func testAccCheckAWSWafv2IPSetDestroy(s *terraform.State) error { for _, rs := range s.RootModule().Resources { if rs.Type != "aws_wafv2_ip_set" { continue } conn := testAccProvider.Meta().(*AWSClient).wafv2conn resp, err := conn.GetIPSet( &wafv2.GetIPSetInput{ Id: aws.String(rs.Primary.ID), Name: aws.String(rs.Primary.Attributes["name"]), Scope: aws.String(rs.Primary.Attributes["scope"]), }) if err == nil { if resp == nil || resp.IPSet == nil { return fmt.Errorf("Error getting WAFv2 IPSet") } if aws.StringValue(resp.IPSet.Id) == rs.Primary.ID { return fmt.Errorf("WAFv2 IPSet %s still exists", rs.Primary.ID) } return nil } // Return nil if the IPSet is already destroyed if isAWSErr(err, wafv2.ErrCodeWAFNonexistentItemException, "") { return nil } return err } return nil } func testAccCheckAWSWafv2IPSetExists(n string, v *wafv2.IPSet) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[n] if !ok { return fmt.Errorf("Not found: %s", n) } if rs.Primary.ID == "" { return fmt.Errorf("No WAFv2 IPSet ID is set") } conn := testAccProvider.Meta().(*AWSClient).wafv2conn resp, err := conn.GetIPSet(&wafv2.GetIPSetInput{ Id: aws.String(rs.Primary.ID), Name: aws.String(rs.Primary.Attributes["name"]), Scope: aws.String(rs.Primary.Attributes["scope"]), }) if err != nil { return err } if resp == nil || resp.IPSet == nil { return fmt.Errorf("Error getting WAFv2 IPSet") } if aws.StringValue(resp.IPSet.Id) == rs.Primary.ID { *v = *resp.IPSet return nil } return fmt.Errorf("WAFv2 IPSet (%s) not found", rs.Primary.ID) } } func testAccAwsWafv2IPSetConfig(name string) string { return fmt.Sprintf(` resource "aws_wafv2_ip_set" "ip_set" { name = "%s" description = "%s" 
scope = "REGIONAL" ip_address_version = "IPV4" addresses = ["1.2.3.4/32", "5.6.7.8/32"] tags = { Tag1 = "Value1" Tag2 = "Value2" } } `, name, name) } func testAccAwsWafv2IPSetConfigUpdate(name string) string { return fmt.Sprintf(` resource "aws_wafv2_ip_set" "ip_set" { name = "%s" description = "Updated" scope = "REGIONAL" ip_address_version = "IPV4" addresses = ["1.1.1.1/32", "2.2.2.2/32", "3.3.3.3/32"] } `, name) } func testAccAwsWafv2IPSetConfigIPv6(name string) string { return fmt.Sprintf(` resource "aws_wafv2_ip_set" "ip_set" { name = "%s" description = "%s" scope = "REGIONAL" ip_address_version = "IPV6" addresses = [ "fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b/128", "1234:5678:9abc:6811:0000:0000:0000:0000/64", "2001:db8::/32" ] } `, name, name) } func testAccAwsWafv2IPSetConfigMinimal(name string) string { return fmt.Sprintf(` resource "aws_wafv2_ip_set" "ip_set" { name = "%s" scope = "REGIONAL" ip_address_version = "IPV4" } `, name) } func testAccAwsWafv2IPSetConfigOneTag(name, tagKey, tagValue string) string { return fmt.Sprintf(` resource "aws_wafv2_ip_set" "ip_set" { name = "%s" description = "%s" scope = "REGIONAL" ip_address_version = "IPV4" addresses = ["1.2.3.4/32", "5.6.7.8/32"] tags = { "%s" = "%s" } } `, name, name, tagKey, tagValue) } func testAccAwsWafv2IPSetConfigTwoTags(name, tag1Key, tag1Value, tag2Key, tag2Value string) string { return fmt.Sprintf(` resource "aws_wafv2_ip_set" "ip_set" { name = "%s" description = "%s" scope = "REGIONAL" ip_address_version = "IPV4" addresses = ["1.2.3.4/32", "5.6.7.8/32"] tags = { "%s" = "%s" "%s" = "%s" } } `, name, name, tag1Key, tag1Value, tag2Key, tag2Value) } func testAccAwsWafv2IPSetConfigLarge(name string) string { return fmt.Sprintf(` resource "aws_wafv2_ip_set" "ip_set" { name = "%s" description = "%s" scope = "REGIONAL" ip_address_version = "IPV4" addresses = [ "172.16.58.3/32", "172.16.58.3/32", "1.1.1.15/32", "2.2.2.30/32", "1.1.1.38/32", "2.2.2.53/32", "1.1.1.21/32", "2.2.2.24/32", "1.1.1.44/32", 
"1.1.1.1/32", "1.1.1.67/32", "2.2.2.76/32", "2.2.2.99/32", "1.1.1.26/32", "2.2.2.93/32", "2.2.2.64/32", "1.1.1.32/32", "2.2.2.12/32", "2.2.2.47/32", "1.1.1.91/32", "1.1.1.78/32", "2.2.2.82/32", "2.2.2.58/32", "1.1.1.85/32", "2.2.2.4/32", "2.2.2.65/32", "2.2.2.23/32", "2.2.2.17/32", "2.2.2.42/32", "1.1.1.56/32", "1.1.1.79/32", "2.2.2.81/32", "2.2.2.36/32", "2.2.2.59/32", "2.2.2.9/32", "1.1.1.7/32", "1.1.1.84/32", "1.1.1.51/32", "2.2.2.70/32", "2.2.2.87/32", "1.1.1.39/32", "1.1.1.90/32", "2.2.2.31/32", "1.1.1.62/32", "1.1.1.14/32", "1.1.1.20/32", "2.2.2.25/32", "1.1.1.45/32", "1.1.1.2/32", "2.2.2.98/32" ] } `, name, name) } func testAccAWSWafv2IPSetImportStateIdFunc(resourceName string) resource.ImportStateIdFunc { return func(s *terraform.State) (string, error) { rs, ok := s.RootModule().Resources[resourceName] if !ok { return "", fmt.Errorf("Not found: %s", resourceName) } return fmt.Sprintf("%s/%s/%s", rs.Primary.ID, rs.Primary.Attributes["name"], rs.Primary.Attributes["scope"]), nil } }
randolphwong/mcsema
boost/tools/build/CMake/test/libs/d/src/main.cpp
<filename>boost/tools/build/CMake/test/libs/d/src/main.cpp #include <boost/preprocessor/stringize.hpp> #include <string> #include <iostream> int main(int argc, char** argv) { std::cout << BOOST_PP_STRINGIZE(LIBNAME) << "-" << BOOST_PP_STRINGIZE(TOPLEVEL_SHARED_OR_STATIC) << "_" << BOOST_PP_STRINGIZE(TOPLEVEL_DEBUG_OR_RELEASE) << "_" << BOOST_PP_STRINGIZE(TOPLEVEL_MULTI_OR_SINGLE) << "\n"; }
leonardt/magma
magma/syntax/__init__.py
<gh_stars>100-1000 from magma.syntax.combinational import combinational try: import kratos from magma.syntax.verilog import build_kratos_debug_info except ImportError: pass
groboclown/p4ic4idea
p4java/r19-1/src/test/java/com/perforce/p4java/tests/dev/unit/bug/r121/SyncRmDirTest.java
/** * */ package com.perforce.p4java.tests.dev.unit.bug.r121; import com.perforce.p4java.client.IClient; import com.perforce.p4java.client.IClientSummary; import com.perforce.p4java.core.file.FileSpecBuilder; import com.perforce.p4java.core.file.IFileSpec; import com.perforce.p4java.exception.P4JavaException; import com.perforce.p4java.impl.generic.client.ClientOptions; import com.perforce.p4java.option.client.SyncOptions; import com.perforce.p4java.tests.SimpleServerRule; import com.perforce.p4java.tests.dev.annotations.Jobs; import com.perforce.p4java.tests.dev.annotations.TestId; import com.perforce.p4java.tests.dev.unit.P4JavaRshTestCase; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import java.io.File; import java.util.List; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Test the sync testdir/...#0 with client option "rmdir". This should remove * the upstream empty directories (up to the client's root). */ @Jobs({ "job052977" }) @TestId("Dev112_SyncSafetyCheckTest") public class SyncRmDirTest extends P4JavaRshTestCase { @ClassRule public static SimpleServerRule p4d = new SimpleServerRule("r16.1", SyncRmDirTest.class.getSimpleName()); IClient client = null; /** * @Before annotation to a method to be run before each test in a class. */ @Before public void setUp() { // initialization code (before each test). try { setupServer(p4d.getRSHURL(), userName, password, true, props); client = getClient(server); createTextFileOnServer(client, "112Dev/GetOpenedFilesTest/src/com/perforce/branch11136/file1.txt", "desc"); createTextFileOnServer(client, "112Dev/GetOpenedFilesTest/src/com/perforce/branch11136/file2.txt", "desc"); } catch (Exception e) { fail("Unexpected exception: " + e.getLocalizedMessage()); } } /** * @After annotation to a method to be run after each test in a class. 
*/ @After public void tearDown() { // cleanup code (after each test). if (server != null) { this.endServerSession(server); } } /** * Test the sync testdir/...#0 with client option "rmdir". This should * remove the upstream empty directories (up to the client's root). */ @Test public void testSyncFilesRmDir() { // Relative path String relativePath = "112Dev/GetOpenedFilesTest/src/com/perforce/branch11136"; // The parent directory File parentDir = new File(client.getRoot() + File.separator + relativePath); List<IFileSpec> files = null; try { // Sync to head files = client.sync( FileSpecBuilder.makeFileSpecList("//depot/" + relativePath + "/..."), new SyncOptions().setForceUpdate(true)); assertNotNull(files); // Check for existing of parent directory assertTrue(parentDir.exists()); // Sync to #0 with client option "rmdir" IClientSummary.IClientOptions clientOptions = new ClientOptions(false, false, false, false, false, true); client.setOptions(clientOptions); client.update(); client = getClient(server); assertTrue(client.getOptions().isRmdir()); files = client .sync(FileSpecBuilder.makeFileSpecList("//depot/" + relativePath + "/...#0"), new SyncOptions().setForceUpdate(true)); assertNotNull(files); // Check directories are deleted, not exist. assertFalse(parentDir.exists()); } catch (Exception exc) { fail("Unexpected exception: " + exc.getLocalizedMessage()); } finally { if (client != null) { try { files = client.sync( FileSpecBuilder.makeFileSpecList("//depot/" + relativePath + "/..."), new SyncOptions().setForceUpdate(true)); cleanupFiles(client); } catch (Exception e) { e.printStackTrace(); } } } } }
vsoch/shroud
regression/reference/example/pyexample_nested_ExClass1type.cpp
<filename>regression/reference/example/pyexample_nested_ExClass1type.cpp // pyexample_nested_ExClass1type.cpp // This file is generated by Shroud nowrite-version. Do not edit. // Copyright (c) 2017-2021, Lawrence Livermore National Security, LLC and // other Shroud Project Developers. // See the top-level COPYRIGHT file for details. // // SPDX-License-Identifier: (BSD-3-Clause) // #include "pyUserLibrarymodule.hpp" // splicer begin namespace.example::nested.class.ExClass1.impl.include // splicer end namespace.example::nested.class.ExClass1.impl.include #ifdef __cplusplus #define SHROUD_UNUSED(param) #else #define SHROUD_UNUSED(param) param #endif #if PY_MAJOR_VERSION >= 3 #define PyInt_AsLong PyLong_AsLong #define PyInt_FromLong PyLong_FromLong #define PyInt_FromSize_t PyLong_FromSize_t #define PyString_FromString PyUnicode_FromString #define PyString_FromStringAndSize PyUnicode_FromStringAndSize #endif // splicer begin namespace.example::nested.class.ExClass1.impl.C_definition // splicer end namespace.example::nested.class.ExClass1.impl.C_definition // splicer begin namespace.example::nested.class.ExClass1.impl.additional_methods // splicer end namespace.example::nested.class.ExClass1.impl.additional_methods static PyObject * PP_ExClass1_tp_repr (PP_ExClass1 *self) { // splicer begin namespace.example::nested.class.ExClass1.type.repr PyErr_SetString(PyExc_NotImplementedError, "repr"); return nullptr; // splicer end namespace.example::nested.class.ExClass1.type.repr } static void PP_ExClass1_tp_del (PP_ExClass1 *self) { // splicer begin namespace.example::nested.class.ExClass1.type.del PP_SHROUD_release_memory(self->idtor, self->obj); self->obj = nullptr; // splicer end namespace.example::nested.class.ExClass1.type.del } static PyObject * PP_ExClass1_tp_richcompare (PP_ExClass1 *self, PyObject *other, int opid) { // splicer begin namespace.example::nested.class.ExClass1.type.richcompare Py_INCREF(Py_NotImplemented); return Py_NotImplemented; // splicer end 
namespace.example::nested.class.ExClass1.type.richcompare } // ---------------------------------------- // Function: ExClass1 // Attrs: +intent(result) // Exact: py_default static int PP_ExClass1_tp_init_0( PP_ExClass1 *self, PyObject *SHROUD_UNUSED(args), PyObject *SHROUD_UNUSED(kwds)) { // splicer begin namespace.example::nested.class.ExClass1.method.ctor_0 self->obj = new example::nested::ExClass1(); if (self->obj == nullptr) { PyErr_NoMemory(); return -1; } self->idtor = 1; return 0; // splicer end namespace.example::nested.class.ExClass1.method.ctor_0 } // ---------------------------------------- // Function: ExClass1 // Attrs: +intent(result) // Exact: py_default // ---------------------------------------- // Argument: const string * name // Attrs: +intent(in) // Exact: py_string_*_in /** * \brief constructor * * longer description * usually multiple lines * * \return return new instance */ static int PP_ExClass1_tp_init_1( PP_ExClass1 *self, PyObject *args, PyObject *kwds) { // splicer begin namespace.example::nested.class.ExClass1.method.ctor_1 char * name; const char *SHT_kwlist[] = { "name", nullptr }; if (!PyArg_ParseTupleAndKeywords(args, kwds, "s:ctor", const_cast<char **>(SHT_kwlist), &name)) return -1; // post_declare const std::string SH_name(name); self->obj = new example::nested::ExClass1(&SH_name); if (self->obj == nullptr) { PyErr_NoMemory(); return -1; } self->idtor = 1; return 0; // splicer end namespace.example::nested.class.ExClass1.method.ctor_1 } // ---------------------------------------- // Function: int incrementCount // Attrs: +intent(result) // Requested: py_native_scalar_result // Match: py_default // ---------------------------------------- // Argument: int incr +value // Attrs: +intent(in) // Requested: py_native_scalar_in // Match: py_default static char PP_incrementCount__doc__[] = "documentation" ; static PyObject * PP_incrementCount( PP_ExClass1 *self, PyObject *args, PyObject *kwds) { // splicer begin 
namespace.example::nested.class.ExClass1.method.increment_count int incr; const char *SHT_kwlist[] = { "incr", nullptr }; PyObject * SHTPy_rv = nullptr; if (!PyArg_ParseTupleAndKeywords(args, kwds, "i:incrementCount", const_cast<char **>(SHT_kwlist), &incr)) return nullptr; int SHCXX_rv = self->obj->incrementCount(incr); // post_call SHTPy_rv = PyInt_FromLong(SHCXX_rv); return (PyObject *) SHTPy_rv; // splicer end namespace.example::nested.class.ExClass1.method.increment_count } // ---------------------------------------- // Function: const string & getNameErrorCheck +deref(allocatable) // Attrs: +deref(allocatable)+intent(result) // Exact: py_string_&_result static char PP_getNameErrorCheck__doc__[] = "documentation" ; static PyObject * PP_getNameErrorCheck( PP_ExClass1 *self, PyObject *SHROUD_UNUSED(args), PyObject *SHROUD_UNUSED(kwds)) { // splicer begin namespace.example::nested.class.ExClass1.method.get_name_error_check PyObject * SHTPy_rv = nullptr; const std::string & SHCXX_rv = self->obj->getNameErrorCheck(); // post_call SHTPy_rv = PyString_FromStringAndSize(SHCXX_rv.data(), SHCXX_rv.size()); return (PyObject *) SHTPy_rv; // splicer end namespace.example::nested.class.ExClass1.method.get_name_error_check } // ---------------------------------------- // Function: const string & getNameArg +deref(result-as-arg) // Attrs: +deref(result-as-arg)+intent(result) // Exact: py_string_&_result static char PP_getNameArg__doc__[] = "documentation" ; static PyObject * PP_getNameArg( PP_ExClass1 *self, PyObject *SHROUD_UNUSED(args), PyObject *SHROUD_UNUSED(kwds)) { // splicer begin namespace.example::nested.class.ExClass1.method.get_name_arg PyObject * SHTPy_rv = nullptr; const std::string & SHCXX_rv = self->obj->getNameArg(); // post_call SHTPy_rv = PyString_FromStringAndSize(SHCXX_rv.data(), SHCXX_rv.size()); return (PyObject *) SHTPy_rv; // splicer end namespace.example::nested.class.ExClass1.method.get_name_arg } // ---------------------------------------- // 
Function: int getValue // Attrs: +intent(result) // Requested: py_native_scalar_result // Match: py_default // ---------------------------------------- // Argument: int value +value // Attrs: +intent(in) // Requested: py_native_scalar_in // Match: py_default static PyObject * PP_getValue_from_int( PP_ExClass1 *self, PyObject *args, PyObject *kwds) { // splicer begin namespace.example::nested.class.ExClass1.method.get_value_from_int int value; const char *SHT_kwlist[] = { "value", nullptr }; PyObject * SHTPy_rv = nullptr; if (!PyArg_ParseTupleAndKeywords(args, kwds, "i:getValue", const_cast<char **>(SHT_kwlist), &value)) return nullptr; int SHCXX_rv = self->obj->getValue(value); // post_call SHTPy_rv = PyInt_FromLong(SHCXX_rv); return (PyObject *) SHTPy_rv; // splicer end namespace.example::nested.class.ExClass1.method.get_value_from_int } // ---------------------------------------- // Function: long getValue // Attrs: +intent(result) // Requested: py_native_scalar_result // Match: py_default // ---------------------------------------- // Argument: long value +value // Attrs: +intent(in) // Requested: py_native_scalar_in // Match: py_default static PyObject * PP_getValue_1( PP_ExClass1 *self, PyObject *args, PyObject *kwds) { // splicer begin namespace.example::nested.class.ExClass1.method.get_value_1 long value; const char *SHT_kwlist[] = { "value", nullptr }; PyObject * SHTPy_rv = nullptr; if (!PyArg_ParseTupleAndKeywords(args, kwds, "l:getValue", const_cast<char **>(SHT_kwlist), &value)) return nullptr; long SHCXX_rv = self->obj->getValue(value); // post_call SHTPy_rv = PyInt_FromLong(SHCXX_rv); return (PyObject *) SHTPy_rv; // splicer end namespace.example::nested.class.ExClass1.method.get_value_1 } // ---------------------------------------- // Function: bool hasAddr // Attrs: +intent(result) // Requested: py_bool_scalar_result // Match: py_bool_result // ---------------------------------------- // Argument: bool in +value // Attrs: +intent(in) // Requested: 
py_bool_scalar_in // Match: py_bool_in static char PP_hasAddr__doc__[] = "documentation" ; static PyObject * PP_hasAddr( PP_ExClass1 *self, PyObject *args, PyObject *kwds) { // splicer begin namespace.example::nested.class.ExClass1.method.has_addr bool in; PyObject * SHPy_in; const char *SHT_kwlist[] = { "in", nullptr }; PyObject * SHTPy_rv = nullptr; if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!:hasAddr", const_cast<char **>(SHT_kwlist), &PyBool_Type, &SHPy_in)) return nullptr; // pre_call in = PyObject_IsTrue(SHPy_in); bool SHCXX_rv = self->obj->hasAddr(in); // post_call SHTPy_rv = PyBool_FromLong(SHCXX_rv); if (SHTPy_rv == nullptr) goto fail; return (PyObject *) SHTPy_rv; fail: Py_XDECREF(SHTPy_rv); return nullptr; // splicer end namespace.example::nested.class.ExClass1.method.has_addr } // ---------------------------------------- // Function: void SplicerSpecial // Exact: py_default static char PP_SplicerSpecial__doc__[] = "documentation" ; static PyObject * PP_SplicerSpecial( PP_ExClass1 *self, PyObject *SHROUD_UNUSED(args), PyObject *SHROUD_UNUSED(kwds)) { // splicer begin namespace.example::nested.class.ExClass1.method.splicer_special self->obj->SplicerSpecial(); Py_RETURN_NONE; // splicer end namespace.example::nested.class.ExClass1.method.splicer_special } static int PP_ExClass1_tp_init( PP_ExClass1 *self, PyObject *args, PyObject *kwds) { // splicer begin namespace.example::nested.class.ExClass1.method.ctor Py_ssize_t SHT_nargs = 0; if (args != nullptr) SHT_nargs += PyTuple_Size(args); if (kwds != nullptr) SHT_nargs += PyDict_Size(args); int rv; if (SHT_nargs == 0) { rv = PP_ExClass1_tp_init_0(self, args, kwds); if (!PyErr_Occurred()) { return rv; } else if (! PyErr_ExceptionMatches(PyExc_TypeError)) { return rv; } PyErr_Clear(); } if (SHT_nargs == 1) { rv = PP_ExClass1_tp_init_1(self, args, kwds); if (!PyErr_Occurred()) { return rv; } else if (! 
PyErr_ExceptionMatches(PyExc_TypeError)) { return rv; } PyErr_Clear(); } PyErr_SetString(PyExc_TypeError, "wrong arguments multi-dispatch"); return -1; // splicer end namespace.example::nested.class.ExClass1.method.ctor } static char PP_getValue__doc__[] = "documentation" ; static PyObject * PP_getValue( PP_ExClass1 *self, PyObject *args, PyObject *kwds) { // splicer begin namespace.example::nested.class.ExClass1.method.get_value Py_ssize_t SHT_nargs = 0; if (args != nullptr) SHT_nargs += PyTuple_Size(args); if (kwds != nullptr) SHT_nargs += PyDict_Size(args); PyObject *rvobj; if (SHT_nargs == 1) { rvobj = PP_getValue_from_int(self, args, kwds); if (!PyErr_Occurred()) { return rvobj; } else if (! PyErr_ExceptionMatches(PyExc_TypeError)) { return rvobj; } PyErr_Clear(); } if (SHT_nargs == 1) { rvobj = PP_getValue_1(self, args, kwds); if (!PyErr_Occurred()) { return rvobj; } else if (! PyErr_ExceptionMatches(PyExc_TypeError)) { return rvobj; } PyErr_Clear(); } PyErr_SetString(PyExc_TypeError, "wrong arguments multi-dispatch"); return nullptr; // splicer end namespace.example::nested.class.ExClass1.method.get_value } // splicer begin namespace.example::nested.class.ExClass1.impl.after_methods // splicer end namespace.example::nested.class.ExClass1.impl.after_methods static PyMethodDef PP_ExClass1_methods[] = { {"incrementCount", (PyCFunction)PP_incrementCount, METH_VARARGS|METH_KEYWORDS, PP_incrementCount__doc__}, {"getNameErrorCheck", (PyCFunction)PP_getNameErrorCheck, METH_NOARGS, PP_getNameErrorCheck__doc__}, {"getNameArg", (PyCFunction)PP_getNameArg, METH_NOARGS, PP_getNameArg__doc__}, {"hasAddr", (PyCFunction)PP_hasAddr, METH_VARARGS|METH_KEYWORDS, PP_hasAddr__doc__}, {"SplicerSpecial", (PyCFunction)PP_SplicerSpecial, METH_NOARGS, PP_SplicerSpecial__doc__}, {"getValue", (PyCFunction)PP_getValue, METH_VARARGS|METH_KEYWORDS, PP_getValue__doc__}, // splicer begin namespace.example::nested.class.ExClass1.PyMethodDef // splicer end 
namespace.example::nested.class.ExClass1.PyMethodDef {nullptr, (PyCFunction)nullptr, 0, nullptr} /* sentinel */ }; static char ExClass1__doc__[] = "virtual class" ; /* static */ PyTypeObject PP_ExClass1_Type = { PyVarObject_HEAD_INIT(nullptr, 0) "userlibrary.example.nested.ExClass1", /* tp_name */ sizeof(PP_ExClass1), /* tp_basicsize */ 0, /* tp_itemsize */ /* Methods to implement standard operations */ (destructor)nullptr, /* tp_dealloc */ (printfunc)nullptr, /* tp_print */ (getattrfunc)nullptr, /* tp_getattr */ (setattrfunc)nullptr, /* tp_setattr */ #if PY_MAJOR_VERSION >= 3 nullptr, /* tp_reserved */ #else (cmpfunc)nullptr, /* tp_compare */ #endif (reprfunc)PP_ExClass1_tp_repr, /* tp_repr */ /* Method suites for standard classes */ nullptr, /* tp_as_number */ nullptr, /* tp_as_sequence */ nullptr, /* tp_as_mapping */ /* More standard operations (here for binary compatibility) */ (hashfunc)nullptr, /* tp_hash */ (ternaryfunc)nullptr, /* tp_call */ (reprfunc)nullptr, /* tp_str */ (getattrofunc)nullptr, /* tp_getattro */ (setattrofunc)nullptr, /* tp_setattro */ /* Functions to access object as input/output buffer */ nullptr, /* tp_as_buffer */ /* Flags to define presence of optional/expanded features */ Py_TPFLAGS_DEFAULT, /* tp_flags */ ExClass1__doc__, /* tp_doc */ /* Assigned meaning in release 2.0 */ /* call function for all accessible objects */ (traverseproc)nullptr, /* tp_traverse */ /* delete references to contained objects */ (inquiry)nullptr, /* tp_clear */ /* Assigned meaning in release 2.1 */ /* rich comparisons */ (richcmpfunc)PP_ExClass1_tp_richcompare, /* tp_richcompare */ /* weak reference enabler */ 0, /* tp_weaklistoffset */ /* Added in release 2.2 */ /* Iterators */ (getiterfunc)nullptr, /* tp_iter */ (iternextfunc)nullptr, /* tp_iternext */ /* Attribute descriptor and subclassing stuff */ PP_ExClass1_methods, /* tp_methods */ nullptr, /* tp_members */ nullptr, /* tp_getset */ nullptr, /* tp_base */ nullptr, /* tp_dict */ (descrgetfunc)nullptr, 
/* tp_descr_get */ (descrsetfunc)nullptr, /* tp_descr_set */ 0, /* tp_dictoffset */ (initproc)PP_ExClass1_tp_init, /* tp_init */ (allocfunc)nullptr, /* tp_alloc */ (newfunc)nullptr, /* tp_new */ (freefunc)nullptr, /* tp_free */ (inquiry)nullptr, /* tp_is_gc */ nullptr, /* tp_bases */ nullptr, /* tp_mro */ nullptr, /* tp_cache */ nullptr, /* tp_subclasses */ nullptr, /* tp_weaklist */ (destructor)PP_ExClass1_tp_del, /* tp_del */ 0, /* tp_version_tag */ #if PY_MAJOR_VERSION >= 3 (destructor)nullptr, /* tp_finalize */ #endif };
lechium/tvOS10Headers
System/Library/PrivateFrameworks/CameraKit.framework/CMKFocusLockView.h
/* * This header is generated by classdump-dyld 1.0 * on Wednesday, March 22, 2017 at 9:06:19 AM Mountain Standard Time * Operating System: Version 10.1 (Build 14U593) * Image Source: /System/Library/PrivateFrameworks/CameraKit.framework/CameraKit * classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. */ #import <CameraKit/CMKFocusView.h> @class CAKeyframeAnimation; @interface CMKFocusLockView : CMKFocusView { CAKeyframeAnimation* _lockBoundsAnimation; } -(double)fadeInDuration; -(void)startAnimatingContents:(BOOL)arg1 bounds:(BOOL)arg2 fadeIn:(BOOL)arg3 ; -(id)_createBoundsAnimation; -(id)_createLockBoundsAnimation; -(void)animateLock; -(void)animateScaleDownWithCompletion:(/*^block*/id)arg1 ; @end
unclazz/jp1ajs2.unitdef
src/main/java/org/unclazz/jp1ajs2/unitdef/query/ListQuery.java
<reponame>unclazz/jp1ajs2.unitdef<filename>src/main/java/org/unclazz/jp1ajs2/unitdef/query/ListQuery.java
package org.unclazz.jp1ajs2.unitdef.query;

import java.util.List;
import java.util.NoSuchElementException;

/**
 * A query whose result is a list.
 *
 * @param <T> type of the object being queried
 * @param <U> element type of the result list
 */
public interface ListQuery<T, U> extends Query<T, List<U>> {
    /**
     * Returns a query for the first element of the list.
     * <p>The query throws {@link NoSuchElementException} when the list is empty.</p>
     * @return the query
     * @throws NoSuchElementException if the list has no elements
     */
    Query<T, U> first();
    /**
     * Returns a query for the first element of the list.
     * <p>The query throws {@link NoSuchElementException} when the list is empty.</p>
     * @param nullable when {@code true}, the query returns {@code null} instead of
     *                 throwing when the list is empty
     * @return the query
     */
    Query<T, U> first(boolean nullable);
    /**
     * Returns a query for the first element of the list.
     * <p>The query returns the default value when the list is empty.</p>
     * @param defaultValue value returned when the list is empty
     * @return the query
     */
    Query<T, U> first(U defaultValue);
    /**
     * Returns a query for the last element of the list.
     * <p>The query throws {@link NoSuchElementException} when the list is empty.</p>
     * @return the query
     */
    Query<T, U> last();
    /**
     * Returns a query for the last element of the list.
     * <p>The query throws {@link NoSuchElementException} when the list is empty.</p>
     * @param nullable when {@code true}, the query returns {@code null} instead of
     *                 throwing when the list is empty
     * @return the query
     */
    Query<T, U> last(boolean nullable);
    /**
     * Returns a query for the last element of the list.
     * <p>The query returns the default value when the list is empty.</p>
     * @param defaultValue value returned when the list is empty
     * @return the query
     */
    Query<T, U> last(U defaultValue);
}
hoyeungw/glossa
packages/i18n/i18n-fin-ntes/index.js
// Aggregation entry point for the i18n-fin-ntes package:
// re-exports the bundled resource dictionaries (balances, incomes,
// cashflows, basics) plus the translation helper classes.
import { balances } from './resources/balances'
import { basics } from './resources/basics'
import { cashflows } from './resources/cashflows'
import { incomes } from './resources/incomes'

export { ChsToEng } from './src/chsToEng'
export { DictCollection } from './src/DictCollection'

export {
  balances,
  incomes,
  cashflows,
  basics,
}
fxiao/gitlab
ee/app/helpers/ee/trial_helper.rb
# frozen_string_literal: true module EE module TrialHelper def company_size_options_for_select(selected = 0) options_for_select([ [_('Please select'), 0], ['1 - 99', '1-99'], ['100 - 499', '100-499'], ['500 - 1,999', '500-1,999'], ['2,000 - 9,999', '2,000-9,999'], ['10,000 +', '10,000+'] ], selected) end def namespace_options_for_select(selected = nil) groups = current_user.manageable_groups.map { |g| [g.name, g.id] } users = [[current_user.namespace.name, current_user.namespace_id]] grouped_options = { 'New' => [[_('Create group'), 0]], 'Groups' => groups, 'Users' => users } grouped_options_for_select(grouped_options, selected, prompt: _('Please select')) end def show_trial_errors?(namespace, service_result) namespace&.invalid? || (service_result && !service_result[:success]) end def trial_errors(namespace, service_result) namespace&.errors&.full_messages&.to_sentence&.presence || service_result&.dig(:errors)&.presence end end end
SehrishHussain/aqa-tests
functional/MBCS_Tests/locale_matching/src/LocaleFilterTest1.java
/*******************************************************************************
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
import java.util.*;
import java.io.*;

// Exercises Locale.filter(...) for every Locale.FilteringMode against the
// default locale, captures stdout, and compares it byte-for-byte with a
// per-locale expected string loaded from a properties fixture.
// NOTE(review): output strings (including their exact leading spaces) are
// part of the contract with the fixture file — do not change them.
public class LocaleFilterTest1 {
  public static void main(String[] args) throws Exception {
    // Redirect System.out into a buffer so the produced report can be
    // compared against the expected text afterwards.
    PrintStream sysOut = System.out;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintStream ps = new PrintStream(baos);
    System.setOut(ps);
    ArrayList<Locale.LanguageRange> list = new ArrayList<>();
    Vector<Locale> collection = new Vector<>();
    Locale loc = Locale.getDefault();
    // Candidate locales: all installed ones plus a few explicit
    // script-qualified CJK tags.
    for(Locale locale : Locale.getAvailableLocales())
      collection.add(locale);
    collection.add(Locale.forLanguageTag("ja-Jpan-JP"));
    collection.add(Locale.forLanguageTag("ko-Hang-KR"));
    collection.add(Locale.forLanguageTag("zh-Hans-CN"));
    collection.add(Locale.forLanguageTag("zh-Hant-TW"));
    // Sort by toString() so the report order is deterministic.
    Collections.sort(collection, new Comparator<Locale>(){
      public int compare(Locale l1, Locale l2) {
        return l1.toString().compareTo(l2.toString());
      }
    });
    List<Locale> filter = null;
    // Pass 1: simple "language-COUNTRY" range, every filtering mode.
    String range = loc.getLanguage()+"-"+loc.getCountry();
    System.out.println("Language Priority List: "+range);
    list.add(new Locale.LanguageRange(range));
    for(Locale.FilteringMode mode : Locale.FilteringMode.values()) {
      System.out.println(" "+mode.toString());
      filter = Locale.filter(list, collection, mode);
      for(Object obj : filter)
        System.out.println(" "+obj.toString());
    }
    list.clear();
    // Pass 2: extended range with a script wildcard ("lang-*-COUNTRY");
    // some modes reject extended ranges with IllegalArgumentException,
    // which is part of the expected output.
    range = loc.getLanguage()+"-*-"+loc.getCountry();
    System.out.println("Language Priority List: "+range);
    list.add(new Locale.LanguageRange(range));
    for(Locale.FilteringMode mode : Locale.FilteringMode.values()) {
      System.out.println(" "+mode.toString());
      try {
        filter = Locale.filter(list, collection, mode);
        for(Object obj : filter)
          System.out.println(" "+obj.toString());
      } catch (IllegalArgumentException iae) {
        //iae.printStackTrace(System.out);
        System.out.println(" "+iae.getClass().getName()+": "+iae.getMessage());
      }
    }
    ps.close();
    baos.close();
    System.setOut(sysOut);
    String outString = baos.toString();
    // -Dresult=false suppresses echoing the captured report.
    if (Boolean.valueOf(System.getProperty("result","true")))
      System.out.print(outString);
    // Load the expected output; JDK >= 16 uses a "_16" fixture variant
    // (JavaVersion is a project helper; version encoded as e.g. 16000000).
    Properties prop = new Properties();
    String suffix = "";
    long ver = JavaVersion.getVersion();
    if (ver >= 16000000L) suffix = "_16";
    prop.load(LocaleFilterTest1.class.
      getResourceAsStream("LocaleFilterTest1"+suffix+".properties"));
    String expected = String.format(prop.getProperty(loc.toString()));
    if (Boolean.valueOf(System.getProperty("expected","false")))
      System.out.printf("--- expected ---%n"+expected);
    System.out.println("Test: "+(expected.equals(outString) ? "Passed" : "Failed"));
  }
}
Openbeats/openbeats
services/core/src/core/ytsearchcat.js
<filename>services/core/src/core/ytsearchcat.js<gh_stars>1-10 import fetch from "node-fetch"; // function to perform null check on the parameters const jsonNullCheckResponse = (parameterName, currentSongObj) => { try { switch (parameterName) { case "title": return currentSongObj["title"]["runs"][0]["text"]; case "allSongThumbnails": return currentSongObj["thumbnail"]["thumbnails"]; case "songThumbnail": let allSongThumbnails = currentSongObj["thumbnail"]["thumbnails"]; return currentSongObj["thumbnail"]["thumbnails"][allSongThumbnails.length - 1]["url"]; case "songDuration": return currentSongObj["lengthText"]["simpleText"]; case "songId": return currentSongObj["videoId"]; case "channelName": return currentSongObj["ownerText"]["runs"][0]["text"]; case "channelId": return currentSongObj["ownerText"]["runs"][0]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]; case "uploadedOn": return currentSongObj["publishedTimeText"]["simpleText"]; case "views": return currentSongObj["viewCountText"]["simpleText"]; case "shortViews": return currentSongObj["shortViewCountText"]["simpleText"]; case "description": return currentSongObj["descriptionSnippet"]["runs"][0]["text"]; } } catch (error) { return null; } }; export default async (queryString, first = false) => { try { // get html response for the query const htmlContent = await ( await fetch("https://www.youtube.com/results?search_query=" + encodeURIComponent(queryString)) ).text(); // compute indexes of the required json string in html document const index1 = htmlContent.indexOf(`window["ytInitialData"]`) + `window["ytInitialData"] = `.length; const index2 = htmlContent.indexOf(`window["ytInitialPlayerResponse"] = null;`) - 6; // convert the required string into json const jsonValue = await JSON.parse(htmlContent.substring(index1, index2)); // parsing to the required array of objects const arrayOfResponses = 
jsonValue.contents.twoColumnSearchResultsRenderer.primaryContents.sectionListRenderer.contents[0].itemSectionRenderer.contents; // computing length of array (ie. number of song objects) const arrayOfResponsesLen = await arrayOfResponses.length; // holds the ytCatResponse let ytCatResponse = []; // iterating through each song object to compile the ytCatResponse object for (let i = 0; i < arrayOfResponsesLen; i++) { // getting current song object let currentSongObj = arrayOfResponses[i]["videoRenderer"]; // filtering for valid video responses if (currentSongObj != null) { // skipping LIVE songs if (currentSongObj["badges"] != null) { var str = JSON.stringify(currentSongObj["badges"]); // check if it is a live video if (str.includes("LIVE")) { // skipping this song continue; } } // getting song parameters and pushing to array ytCatResponse.push({ title: jsonNullCheckResponse("title", currentSongObj), thumbnail: jsonNullCheckResponse("songThumbnail", currentSongObj), duration: jsonNullCheckResponse("songDuration", currentSongObj), videoId: jsonNullCheckResponse("songId", currentSongObj), channelName: jsonNullCheckResponse("channelName", currentSongObj), channelId: jsonNullCheckResponse("channelId", currentSongObj), uploadedOn: jsonNullCheckResponse("uploadedOn", currentSongObj), views: jsonNullCheckResponse("views", currentSongObj), description: jsonNullCheckResponse("description", currentSongObj) }); // breaking loop if first is true if (first) break; } } return ytCatResponse; } catch (error) { console.error(error); return []; } };
woorim960/woowahan-agile-codingtest
coding-test/minjae/dfs-bfs/미로탈출.js
<filename>coding-test/minjae/dfs-bfs/미로탈출.js
// Maze-escape problem: 1 = passable cell, 0 = wall.
// Start at (0,0); answer is the shortest number of cells visited to
// reach the bottom-right corner.
const maze = [
  [1, 0, 1, 0, 1, 0],
  [1, 1, 1, 1, 1, 1],
  [0, 0, 0, 0, 0, 1],
  [1, 1, 1, 1, 1, 1],
  [1, 1, 1, 1, 1, 1],
];

const [MAZE_HEIGHT, MAZE_WIDTH] = [maze.length, maze[0].length];

// BFS from (x, y). Distances are recorded by mutating `maze` in place:
// each reachable cell gets (distance of its predecessor) + 1, so the
// grid doubles as the visited set (cells > 1 are already visited).
// NOTE(review): the start cell can be re-labelled when a neighbour
// reaches back into it (it still holds value 1); this does not affect
// the returned answer but leaves maze[x][y] inconsistent.
function solution(x, y) {
  // Four orthogonal directions: up, down, left, right.
  const dx = [-1, 1, 0, 0];
  const dy = [0, 0, -1, 1];
  const needVisitQueue = [];
  let nx, ny;
  nx = ny = 0;

  needVisitQueue.push([x, y]);

  while (needVisitQueue.length !== 0) {
    const node = needVisitQueue.shift();
    [x, y] = node;

    for (let i = 0; i < 4; i++) {
      nx = x + dx[i];
      ny = y + dy[i];

      // Out of bounds.
      if (nx < 0 || ny < 0 || nx >= MAZE_HEIGHT || ny >= MAZE_WIDTH) continue;
      // Wall.
      if (maze[nx][ny] === 0) continue;
      if (maze[nx][ny] === 1) {
        // Because of the `continue`s above this branch only runs for
        // unvisited passable cells, so no else-if is needed.
        maze[nx][ny] = maze[x][y] + 1;
        needVisitQueue.push([nx, ny]);
      }
    }
  }

  // Distance recorded at the bottom-right corner.
  return maze[MAZE_HEIGHT - 1][MAZE_WIDTH - 1];
}

console.log(solution(0, 0));
justremotephone/android_external_chromium_org
gpu/command_buffer/service/image_manager.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "gpu/command_buffer/service/image_manager.h" #include "ui/gl/gl_image.h" namespace gpu { namespace gles2 { ImageManager::ImageManager() : release_after_use_(false) { } ImageManager::~ImageManager() { } void ImageManager::RegisterGpuMemoryBuffer(int32 id, gfx::GpuMemoryBufferHandle buffer, size_t width, size_t height, unsigned internalformat) { if (id <= 0) { DVLOG(0) << "Cannot register GPU memory buffer with non-positive ID."; return; } if (LookupImage(id)) { DVLOG(0) << "GPU memory buffer ID already in use."; return; } scoped_refptr<gfx::GLImage> gl_image = gfx::GLImage::CreateGLImageForGpuMemoryBuffer(buffer, gfx::Size(width, height), internalformat); if (!gl_image) return; if (release_after_use_) gl_image->SetReleaseAfterUse(); AddImage(gl_image.get(), id); } void ImageManager::UnregisterGpuMemoryBuffer(int32 id) { RemoveImage(id); } void ImageManager::AddImage(gfx::GLImage* image, int32 service_id) { gl_images_[service_id] = image; } void ImageManager::RemoveImage(int32 service_id) { gl_images_.erase(service_id); } gfx::GLImage* ImageManager::LookupImage(int32 service_id) { GLImageMap::const_iterator iter = gl_images_.find(service_id); if (iter != gl_images_.end()) return iter->second.get(); return NULL; } void ImageManager::SetReleaseAfterUse() { release_after_use_ = true; } } // namespace gles2 } // namespace gpu
mrroach/CentralServer
csrv/model/cards/runner/card01012.py
<filename>csrv/model/cards/runner/card01012.py
from csrv.model import actions
from csrv.model import errors
from csrv.model import events
from csrv.model import modifiers
from csrv.model import timing_phases
from csrv.model.cards import card_info
from csrv.model.cards import ice
from csrv.model.cards import program


class InstallCard01012(actions.InstallProgram):
  """Install action for Card01012: the program must be hosted on rezzed ice."""

  def is_usable(self):
    # Usable only if the base install action is usable AND at least one
    # rezzed piece of ice exists to host the program.
    # NOTE(review): implicitly returns None (falsy) when no rezzed ice is
    # found, relying on None being treated as False by callers.
    if not actions.InstallProgram.is_usable(self):
      return False
    for server in self.game.corp.servers:
      for card in server.ice.cards:
        if card.is_rezzed:
          return True

  def resolve(self, response=None, ignore_clicks=False,
              ignore_all_costs=False):
    """Resolve the install; a host choice is mandatory for this card."""
    if not response or not response.host:
      raise errors.InvalidResponse(
          'You must choose a host for Card01012.')
    actions.InstallProgram.resolve(
        self, response, ignore_clicks=ignore_clicks,
        ignore_all_costs=ignore_all_costs)


class Card01012(program.Program):
  """Virus program hosted on rezzed ice.

  Gains a virus counter at the start of each runner turn; each counter
  lowers the host ice's strength by 1 (via an IceStrengthModifier).  When
  the host's strength reaches 0 or less, the host ice is trashed.
  """

  NAME = u'Card01012'
  SET = card_info.CORE
  NUMBER = 12
  SIDE = card_info.RUNNER
  FACTION = card_info.ANARCH
  INFLUENCE = 2
  UNIQUE = False
  KEYWORDS = set([
      card_info.VIRUS,
  ])
  COST = 2
  MEMORY = 1
  IMAGE_SRC = '01012.png'

  # Events this card reacts to while installed.
  WHEN_INSTALLED_LISTENS = [
      events.RunnerTurnBegin,
      events.IceStrengthChanged,
  ]

  def __init__(self, game, player, location=None):
    program.Program.__init__(self, game, player, location)
    # Lazily created the first time virus_counters is assigned.
    self.modifier = None

  def install_host_targets(self):
    """Return all rezzed ice that can host this program."""
    targets = []
    for server in self.game.corp.servers:
      for card in server.ice.cards:
        if card.is_rezzed:
          targets.append(card)
    return targets

  def get_virus_counters(self):
    return self._virus_counters

  def set_virus_counters(self, value):
    # Keep the strength modifier in sync: -1 strength per counter.
    self._virus_counters = value
    if not self.modifier:
      self.modifier = modifiers.IceStrengthModifier(
          self.game, 0, card=self.host)
    self.modifier.set_value(-1 * self._virus_counters)

  virus_counters = property(get_virus_counters, set_virus_counters)

  def on_ice_strength_changed(self, sender, event):
    # Trash the host once its strength is driven to zero or below.
    if self.host.strength <= 0:
      self.host.trash()

  def build_actions(self):
    program.Program.build_actions(self)
    # Replace the default install action with the host-requiring variant.
    self.install_action = InstallCard01012(
        self.game, self.player, self)

  def on_runner_turn_begin(self, sender, event):
    # One virus counter per runner turn (setter updates the modifier).
    self.virus_counters += 1

  def on_uninstall(self):
    program.Program.on_uninstall(self)
    # Remove the strength debuff when the program leaves play.
    if self.modifier:
      self.modifier.remove()
      self.modifier = None
nonomal/ultimateshell
ultimate-terminal/src/main/java/com/jediterm/terminal/TerminalMode.java
<filename>ultimate-terminal/src/main/java/com/jediterm/terminal/TerminalMode.java
package com.jediterm.terminal;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// DEC private / ANSI terminal modes toggled by escape sequences.
// Each constant that is actually supported overrides setEnabled(); the
// default implementation below logs an error for unimplemented modes.
public enum TerminalMode {
  Null,
  CursorKey {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      terminal.setApplicationArrowKeys(enabled);
    }
  },
  ANSI,
  WideColumn {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      // Skip resizing as it would require to resize parent container.
      // Other terminal emulators (iTerm2, Terminal.app, GNOME Terminal) ignore it too.
      terminal.clearScreen();
      terminal.resetScrollRegions();
    }
  },
  CursorVisible {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      terminal.setCursorVisible(enabled);
    }
  },
  AlternateBuffer {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      terminal.useAlternateBuffer(enabled);
    }
  },
  SmoothScroll,
  ReverseVideo,
  OriginMode {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      // Intentionally empty: mode is tracked but needs no immediate action.
    }
  },
  AutoWrap {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      //we do nothing just switching the mode
    }
  },
  AutoRepeatKeys,
  Interlace,
  Keypad {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      terminal.setApplicationKeypad(enabled);
    }
  },
  StoreCursor {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      // Enabling saves the cursor state; disabling restores it.
      if (enabled) {
        terminal.saveCursor();
      }
      else {
        terminal.restoreCursor();
      }
    }
  },
  CursorBlinking {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      terminal.setBlinkingCursor(enabled);
    }
  },
  AllowWideColumn,
  ReverseWrapAround,
  AutoNewLine {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      terminal.setAutoNewLine(enabled);
    }
  },
  KeyboardAction,
  InsertMode,
  SendReceive,
  EightBitInput, //Interpret "meta" key, sets eighth bit. (enables the eightBitInput resource).
  // http://www.leonerd.org.uk/hacks/hints/xterm-8bit.html
  AltSendsEscape //See section Alt and Meta Keys in http://invisible-island.net/xterm/ctlseqs/ctlseqs.html
    {
      @Override
      public void setEnabled(Terminal terminal, boolean enabled) {
        terminal.setAltSendsEscape(enabled);
      }
    },

  // https://cirw.in/blog/bracketed-paste
  // http://www.xfree86.org/current/ctlseqs.html#Bracketed%20Paste%20Mode
  BracketedPasteMode {
    @Override
    public void setEnabled(Terminal terminal, boolean enabled) {
      terminal.setBracketedPasteMode(enabled);
    }
  };

  private static final Logger LOG = LoggerFactory.getLogger(com.jediterm.terminal.TerminalMode.class);

  // Default handler for modes without an override: log and ignore.
  public void setEnabled(Terminal terminal, boolean enabled) {
    LOG.error("Mode " + name() + " is not implemented, setting to " + enabled);
  }
}
cyberfined/testguru
db/migrate/20210119130856_create_test_passages.rb
<filename>db/migrate/20210119130856_create_test_passages.rb
# Creates the test_passages table: one row per user's attempt at a test.
# current_question points into questions (nullable — nil once finished or
# before the first question is assigned); points accumulates the score.
class CreateTestPassages < ActiveRecord::Migration[6.1]
  def change
    create_table :test_passages do |t|
      t.references :user, null: false, foreign_key: true
      t.references :test, null: false, foreign_key: true
      # Explicit to_table because the column name doesn't match the table.
      t.references :current_question, foreign_key: { to_table: :questions }
      t.integer :points, null: false, default: 0

      t.timestamps
    end
  end
end
Rohit-1609/Core_Java
Section3/src/com/assignment/collections/PrimitiveWrapper.java
package com.corejava.assignment.collections;

// Demo of Java's eight primitive types alongside their wrapper classes,
// relying on autoboxing for the wrapper assignments.
// NOTE(review): package name (com.corejava.assignment.collections) does not
// match the source path (com/assignment/collections) — verify build layout.
public class PrimitiveWrapper {

	public static void main(String[] args) {
		// NOTE(review): "Strated" is a typo for "Started" in the output.
		System.out.println("main method Strated");
		// int vs Integer
		int number1=50;
		System.out.println(number1);
		Integer number2=40;
		System.out.println(number2);
		// short vs Short
		short number3=10;
		System.out.println(number3);
		Short number4=11;
		System.out.println(number4);
		// long vs Long (note the lowercase 'l' literal suffix)
		long number5= 999998873272l;
		System.out.println(number5);
		Long number6= 4238383739333l;
		System.out.println(number6);
		// byte vs Byte
		byte number7=1;
		System.out.println(number7);
		Byte number8=0;
		System.out.println(number8);
		// float vs Float
		float number9= 123.34f;
		System.out.println(number9);
		Float number10= 12345.50f;
		System.out.println(number10);
		// double vs Double
		double number11= 32189.50;
		System.out.println(number11);
		Double number12=1231244444.50;
		System.out.println(number12);
		// char vs Character
		char char1='A';
		System.out.println(char1);
		Character char2= 'B';
		System.out.println(char2);
		// boolean vs Boolean
		boolean number13= true;
		System.out.println(number13);
		Boolean number14=false;
		System.out.println(number14);
		System.out.println("Main method ended");
	}

}
kosakkun/MM-Tester
problems/SlidingPuzzle/tester/InputData.java
<reponame>kosakkun/MM-Tester
import java.security.SecureRandom;

// Test-case generator for the SlidingPuzzle problem: an N x N board holding
// 1..N*N-1 plus a blank (-1), shuffled by random valid moves so every
// generated instance is solvable.
public class InputData implements Cloneable
{
    public static final int MAX_N = 10;
    public static final int MIN_N = 4;
    // Number of random moves applied from the solved position.
    public static final int SHUFFLE = 100000;

    public int N;
    public int[][] B;

    public InputData (final int N)
    {
        this.N = N;
        this.B = new int[N][N];
    }

    // Serializes as: N on the first line, then N rows of space-separated cells.
    @Override
    public String toString ()
    {
        StringBuffer sb = new StringBuffer();
        sb.append(N).append('\n');
        for (int x = 0; x < N; x++) {
            for (int y = 0; y < N; y++) {
                sb.append(B[x][y]);
                sb.append((y < N - 1) ? ' ' : '\n');
            }
        }
        return sb.toString();
    }

    // Deep copy: clones the board rows so the copy is independent.
    @Override
    public InputData clone ()
    {
        InputData id = null;
        try {
            id = (InputData)super.clone();
            id.B = new int[this.N][];
            for (int i = 0; i < this.N; i++) {
                id.B[i] = this.B[i].clone();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return id;
    }

    // Builds a board deterministically from the seed. Seeds 0..7 map directly
    // to fixed sizes (seed + 3); larger seeds pick N uniformly in
    // [MIN_N, MAX_N].
    // NOTE(review): seed 0 yields N = 3, below MIN_N — confirm seeds start at 1.
    public static InputData genInputData (
        final long seed)
        throws Exception
    {
        SecureRandom rnd = SecureRandom.getInstance("SHA1PRNG");
        rnd.setSeed(seed);
        final int N = (seed <= 7) ? (int)seed + 3 : rnd.nextInt(MAX_N - MIN_N + 1) + MIN_N;
        InputData id = new InputData(N);

        // Start from the solved arrangement with the blank bottom-right.
        for (int r = 0; r < id.N; r++) {
            for (int c = 0; c < id.N; c++) {
                id.B[r][c] = r * id.N + c + 1;
            }
        }
        id.B[id.N - 1][id.N - 1] = -1;

        // Shuffle with valid moves only (Checker.movePannel mutates id.B and
        // reports whether the chosen cell could slide), keeping solvability.
        for (int i = 0; i < SHUFFLE; i++) {
            while (true) {
                int r = rnd.nextInt(id.N);
                int c = rnd.nextInt(id.N);
                if (Checker.movePannel(id.N, r, c, id.B)) {
                    break;
                }
            }
        }
        return id;
    }
}
bborn/eschaton
slices/google_maps/google/tabbed_info_window.rb
module Google # TODO - Merge all this into the one and only InfoWindow class TabbedInfoWindow < MapObject def initialize(options = {}) super self << "tabs = [];" @tabs = [] end def add_tab(options) #InfoWindow.build_content(options) do |content| # self << "tabs.push(new GInfoWindowTab(#{options[:title].to_js}, \"<div style=\'border: solid 1px red; width: 250px; height: 350px;\'>hello boss<br/>More please<br/>tdasdasdas<br/>dasdasdasda<br/>adasdasda<br/></div>\"));" #end end def open(options = {}) options.default! :location => :center location = Google::OptionsHelper.to_location(options[:location]) self << "center = #{self.var}.getCenter();" if location == :center self << "#{self.var}.openInfoWindowTabs(#{location}, tabs);" end end end
fengbaoheng/leetcode
java/unclassified/algorithms/234.palindrome-linked-list.java
/* * @lc app=leetcode.cn id=234 lang=java * * [234] 回文链表 */ /** * Definition for singly-linked list. * public class ListNode { * int val; * ListNode next; * ListNode(int x) { val = x; } * } */ class Solution { public boolean isPalindrome(ListNode head) { if (head == null || head.next == null) { return true; } ListNode back = split(head); back = reverse(back); return isSame(head, back); } // 使用快慢指针将链表拆分为等长的两部分 // 即长度为奇数时, 去除最中间的部分 private ListNode split(ListNode head) { if (head == null || head.next == null) { return null; } ListNode low = head; ListNode fast = head.next; while (fast.next != null && fast.next.next != null) { low = low.next; fast = fast.next.next; } ListNode back = fast.next == null ? low.next : low.next.next; low.next = null; return back; } // 反转链表 private ListNode reverse(ListNode head) { if (head == null || head.next == null) { return head; } ListNode newHead = head; head = head.next; newHead.next = null; // 头节点变尾节点 while (head != null) { ListNode curNode = head; head = head.next; curNode.next = newHead; newHead = curNode; } return newHead; } // 判断两链表是否相等 private boolean isSame(ListNode head1, ListNode head2) { while (head1 != null && head2 != null) { if (head1.val != head2.val) { return false; } head1 = head1.next; head2 = head2.next; } // 两个链表是否等长度 return head1 == null && head2 == null; } }
Glost/db_nets_renew_plugin
root/prj/sol/projects/renew2.5source/renew2.5/src/Gui/src/de/renew/gui/NetInstanceHandle.java
package de.renew.gui; import CH.ifa.draw.framework.DrawingView; import CH.ifa.draw.framework.Figure; import CH.ifa.draw.util.ColorMap; import de.renew.remote.NetInstanceAccessor; import java.awt.Color; import java.awt.Rectangle; public class NetInstanceHandle extends ClickHandle { NetInstanceAccessor netInstance; /** * Creates a new handle within the given rectangle, surrounded by a blue * border, displaying the name of the instance. **/ public NetInstanceHandle(Figure owner, Rectangle box, NetInstanceAccessor netInstance) { super(owner, ColorMap.NONE, Color.blue, box); this.netInstance = netInstance; } public void invokeStart(int x, int y, DrawingView view) { super.invokeStart(x, y, view); noChangesMade(); ((CPNApplication) view.editor()).openInstanceDrawing(netInstance); } // protected void drawInner(Graphics g) { // g.setColor(Color.blue); // g.drawLine(box.x,box.y+box.height-1,box.x+box.width,box.y+box.height-1); // } }
westdart/nifi
nifi-registry/nifi-registry-core/nifi-registry-framework/src/main/java/org/apache/nifi/registry/extension/ExtensionCloseable.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.registry.extension; import java.io.Closeable; import java.io.IOException; public class ExtensionCloseable implements Closeable { private final ClassLoader toSet; private ExtensionCloseable(ClassLoader toSet) { this.toSet = toSet; } public static ExtensionCloseable withComponentClassLoader(final ExtensionManager manager, final Class componentClass) { final ClassLoader current = Thread.currentThread().getContextClassLoader(); final ExtensionCloseable closeable = new ExtensionCloseable(current); ClassLoader componentClassLoader = manager.getExtensionClassLoader(componentClass.getName()); if (componentClassLoader == null) { componentClassLoader = componentClass.getClassLoader(); } Thread.currentThread().setContextClassLoader(componentClassLoader); return closeable; } public static ExtensionCloseable withClassLoader(final ClassLoader componentClassLoader) { final ClassLoader current = Thread.currentThread().getContextClassLoader(); final ExtensionCloseable closeable = new ExtensionCloseable(current); Thread.currentThread().setContextClassLoader(componentClassLoader); return closeable; } @Override public void close() throws IOException { if (toSet != null) { 
Thread.currentThread().setContextClassLoader(toSet); } } }
IPDSnelting/velcom
backend/backend/src/main/java/de/aaaaaaah/velcom/backend/restapi/endpoints/RepoEndpoint.java
<reponame>IPDSnelting/velcom package de.aaaaaaah.velcom.backend.restapi.endpoints; import static java.util.stream.Collectors.toList; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import de.aaaaaaah.velcom.backend.access.caches.AvailableDimensionsCache; import de.aaaaaaah.velcom.backend.access.dimensionaccess.DimensionReadAccess; import de.aaaaaaah.velcom.backend.access.dimensionaccess.entities.Dimension; import de.aaaaaaah.velcom.backend.access.repoaccess.RepoWriteAccess; import de.aaaaaaah.velcom.backend.access.repoaccess.entities.BranchName; import de.aaaaaaah.velcom.backend.access.repoaccess.entities.RemoteUrl; import de.aaaaaaah.velcom.backend.access.repoaccess.entities.Repo; import de.aaaaaaah.velcom.backend.access.repoaccess.entities.Repo.GithubInfo; import de.aaaaaaah.velcom.backend.access.repoaccess.entities.RepoId; import de.aaaaaaah.velcom.backend.access.repoaccess.exceptions.FailedToAddRepoException; import de.aaaaaaah.velcom.backend.access.repoaccess.exceptions.NoSuchRepoException; import de.aaaaaaah.velcom.backend.listener.Listener; import de.aaaaaaah.velcom.backend.listener.SynchronizeCommitsException; import de.aaaaaaah.velcom.backend.restapi.authentication.Admin; import de.aaaaaaah.velcom.backend.restapi.jsonobjects.JsonBranch; import de.aaaaaaah.velcom.backend.restapi.jsonobjects.JsonDimension; import de.aaaaaaah.velcom.backend.restapi.jsonobjects.JsonRepo; import io.dropwizard.auth.Auth; import io.dropwizard.jersey.PATCH; import io.micrometer.core.annotation.Timed; import java.time.Instant; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; import javax.annotation.Nullable; import javax.validation.constraints.NotNull; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import 
javax.ws.rs.WebApplicationException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Path("/repo") @Produces(MediaType.APPLICATION_JSON) public class RepoEndpoint { // Most of the logic found here was copied pretty much directly from the old repo endpoint. private static final Logger LOGGER = LoggerFactory.getLogger(RepoEndpoint.class); private final DimensionReadAccess dimensionAccess; private final RepoWriteAccess repoAccess; private final AvailableDimensionsCache availableDimensionsCache; private final Listener listener; public RepoEndpoint(DimensionReadAccess dimensionAccess, RepoWriteAccess repoAccess, AvailableDimensionsCache availableDimensionsCache, Listener listener) { this.dimensionAccess = dimensionAccess; this.repoAccess = repoAccess; this.availableDimensionsCache = availableDimensionsCache; this.listener = listener; } private JsonRepo toJsonRepo(Repo repo) { List<JsonBranch> branches = repoAccess.getAllBranches(repo.getId()).stream() .map(JsonBranch::fromBranch) .collect(toList()); Set<Dimension> dimensions = availableDimensionsCache .getAvailableDimensionsFor(dimensionAccess, repo.getId()); List<JsonDimension> jsonDimensions = dimensionAccess.getDimensionInfos(dimensions).stream() .map(JsonDimension::fromDimensionInfo) .collect(toList()); return new JsonRepo( repo.getIdAsUuid(), repo.getName(), repo.getRemoteUrlAsString(), branches, jsonDimensions, repo.getGithubInfo() .map(GithubInfo::getCommentCutoff) .map(Instant::getEpochSecond) .orElse(null) ); } @POST @Timed(histogram = true) public PostReply post(@Auth Admin admin, @NotNull PostRequest request) throws FailedToAddRepoException { RemoteUrl remoteUrl = new RemoteUrl(request.getRemoteUrl()); Repo repo = repoAccess.addRepo(request.getName(), remoteUrl); try { listener.synchronizeCommitsForRepo(repo); return new PostReply(toJsonRepo(repo)); } catch (SynchronizeCommitsException e) { 
repoAccess.deleteRepo(repo.getId()); throw new WebApplicationException("Repo could not be cloned, invalid remote url", Status.BAD_REQUEST); } } private static class PostRequest { private final String name; private final String remoteUrl; @JsonCreator public PostRequest( @JsonProperty(required = true) String name, @JsonProperty(required = true) String remoteUrl ) { this.name = name; this.remoteUrl = remoteUrl; } public String getName() { return name; } public String getRemoteUrl() { return remoteUrl; } } private static class PostReply { private final JsonRepo repo; public PostReply(JsonRepo repo) { this.repo = repo; } public JsonRepo getRepo() { return repo; } } @GET @Path("{repoid}") @Timed(histogram = true) public GetReply get(@PathParam("repoid") UUID repoUuid) throws NoSuchRepoException { RepoId repoId = new RepoId(repoUuid); Repo repo = repoAccess.getRepo(repoId); List<JsonGithubCommand> commands = repoAccess.getCommands(repoId) .stream() .map(command -> new JsonGithubCommand( command.getPr(), command.getComment(), command.getState().getTextualRepresentation() )) .collect(toList()); return new GetReply(toJsonRepo(repo), commands); } private static class GetReply { public final JsonRepo repo; public final List<JsonGithubCommand> githubCommands; public GetReply(JsonRepo repo, List<JsonGithubCommand> githubCommands) { this.repo = repo; this.githubCommands = githubCommands; } } private static class JsonGithubCommand { public final long prNumber; public final long commentId; public final String status; public JsonGithubCommand(long prNumber, long commentId, String status) { this.prNumber = prNumber; this.commentId = commentId; this.status = status; } } @PATCH @Path("{repoid}") @Timed(histogram = true) public void patch( @Auth Admin admin, @PathParam("repoid") UUID repoUuid, @NotNull PatchRequest request ) throws NoSuchRepoException { RepoId repoId = new RepoId(repoUuid); // Guards whether the repo exists (that's why it's so high up in the function) Repo repo = 
repoAccess.getRepo(repoId); repoAccess.updateRepo( repoId, request.getName().orElse(null), request.getRemoteUrl().map(RemoteUrl::new).orElse(null) ); request.getTrackedBranches().ifPresent(trackedBranches -> { Set<BranchName> trackedBranchNames = trackedBranches.stream() .map(BranchName::fromName) .collect(Collectors.toSet()); repoAccess.setTrackedBranches(repoId, trackedBranchNames); try { listener.synchronizeCommitsForRepo(repo); } catch (SynchronizeCommitsException e) { LOGGER.warn("Failed to update repo {} successfully", repoId); } }); request.getGithubToken().ifPresent(token -> { String stripped = token.strip(); if (stripped.equals("")) { repoAccess.unsetGithubAuthToken(repoId); } else { repoAccess.setGithubAuthToken(repoId, stripped); } }); } private static class PatchRequest { @Nullable private final String name; @Nullable private final String remoteUrl; @Nullable private final List<String> trackedBranches; @Nullable private final String githubToken; @JsonCreator public PatchRequest(@Nullable String name, @Nullable String remoteUrl, @Nullable List<String> trackedBranches, @Nullable String githubToken) { this.name = name; this.remoteUrl = remoteUrl; this.trackedBranches = trackedBranches; this.githubToken = githubToken; } public Optional<String> getName() { return Optional.ofNullable(name); } public Optional<String> getRemoteUrl() { return Optional.ofNullable(remoteUrl); } public Optional<List<String>> getTrackedBranches() { return Optional.ofNullable(trackedBranches); } public Optional<String> getGithubToken() { return Optional.ofNullable(githubToken); } } @DELETE @Path("{repoid}") @Timed(histogram = true) public void delete(@Auth Admin admin, @PathParam("repoid") UUID repoUuid) throws NoSuchRepoException { RepoId repoId = new RepoId(repoUuid); repoAccess.guardRepoExists(repoId); // Also deletes the repo from all tables in the db that have a foreign key on the repo table // since all (relevant) foreign key restraints are marked as ON DELETE CASCADE. 
This includes // the queue table. repoAccess.deleteRepo(repoId); } }
carlosevmoura/courses-notes
programming/python-curso_em_video/exercises/ex104.py
<reponame>carlosevmoura/courses-notes def leia_inteiro(_texto): while True: numero = str(input(_texto)) if numero.isnumeric(): return numero else: print('Erro! Digite um número inteiro válido!') numero = leia_inteiro('Digite um número: ') print('Você acabou de digitar o número {}.'.format(numero))
kaigexie/semparse-core
semparse-core/src/main/java/io/github/semlink/semlink/aligner/FilterReferenceAligner.java
<reponame>kaigexie/semparse-core /* * Copyright 2019 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.github.semlink.semlink.aligner; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import java.util.stream.Collectors; import io.github.semlink.propbank.type.ArgNumber; import io.github.semlink.semlink.PropBankPhrase; import lombok.NonNull; /** * Remove reference arguments from consideration in alignment. * * @author jgung */ public class FilterReferenceAligner implements PbVnAligner { @Override public void align(@NonNull PbVnAlignment alignment) { Multimap<ArgNumber, PropBankPhrase> phraseMap = Multimaps.index(alignment.propbankPhrases().stream() .filter(arg -> !arg.isReference()) .collect(Collectors.toList()), PropBankPhrase::getNumber); alignment.propbankPhrases(alignment.propbankPhrases().stream() .filter(phrase -> !phrase.isReferenceOrContinuation() || !phraseMap.containsKey(phrase.getNumber())) .collect(Collectors.toList())); } }
godot-addons/godot-sfs2x
thirdparty/sfs2x/Logging/LoggerEvent.h
<gh_stars>1-10 // =================================================================== // // Description // Contains the definition of LoggerEvent // // Revision history // Date Description // 30-Nov-2012 First version // // =================================================================== #ifndef __LoggerEvent__ #define __LoggerEvent__ #include "../Core/BaseEvent.h" #include "LogLevel.h" #include <boost/shared_ptr.hpp> // Boost Asio shared pointer // STL library settings #if defined(_MSC_VER) #pragma warning(disable:4786) // STL library: disable warning 4786; this warning is generated due to a Microsoft bug #endif #include <string> // STL library: string object #include <map> // STL library: map object using namespace std; // Declare the STL namespace using namespace Sfs2X::Core; namespace Sfs2X { namespace Logging { // ------------------------------------------------------------------- // Class LoggerEvent // ------------------------------------------------------------------- class DLLImportExport LoggerEvent : public BaseEvent { public: // ------------------------------------------------------------------- // Public methods // ------------------------------------------------------------------- LoggerEvent(LogLevel level, boost::shared_ptr<map<string, boost::shared_ptr<void> > > parameters); ~LoggerEvent(); static boost::shared_ptr<string> LogEventType(LogLevel level); boost::shared_ptr<string> ToString(); boost::shared_ptr<void> Clone(); // ------------------------------------------------------------------- // Public members // ------------------------------------------------------------------- protected: // ------------------------------------------------------------------- // Protected methods // ------------------------------------------------------------------- // ------------------------------------------------------------------- // Protected members // ------------------------------------------------------------------- private: // 
------------------------------------------------------------------- // Private methods // ------------------------------------------------------------------- // ------------------------------------------------------------------- // Private members // ------------------------------------------------------------------- LogLevel level; }; } // namespace Logging } // namespace Sfs2X #endif
fkwai/geolearn
app/streamflow/regional/ts.py
import matplotlib.pyplot as plt from hydroDL.post import axplot, figplot import scipy from hydroDL.data import dbBasin from hydroDL.master import basinFull import os import pandas as pd from hydroDL import kPath, utils import importlib import time import numpy as np from hydroDL.data import usgs, gageII, gridMET, ntn, transform caseLst = ['080401', '080305', '080304', '090203', '090402', '080301', '090403', '050301'] dataName = 'Q90' dm = dbBasin.DataModelFull(dataName) outName = '{}-B10'.format(dataName) yPLst = list() yOLst = list() yP, ycP = basinFull.testModel( outName, DM=dm, batchSize=20, testSet='all') yO, ycO = basinFull.getObs(outName, 'all', DM=dm) indT = np.where(dm.t == np.datetime64('2010-01-01'))[0][0] # for case in caseLst: case = caseLst[0] testSet = 'Eco'+case indS = [dm.siteNoLst.index(siteNo) for siteNo in dm.subset[testSet]] yPLst.append(yP[:, indS, 0]) yOLst.append(yO[:, indS, 0]) trainLst = [case[:2], case[:4], case[:6]] outLst = ['{}-Eco{}-B10-gs'.format(dataName, x) for x in trainLst] for outName in outLst: yP, ycP = basinFull.testModel( outName, DM=dm, batchSize=20, testSet=testSet) yO, ycO = basinFull.getObs(outName, testSet, DM=dm) yPLst.append(yP[:, :, 0]) yOLst.append(yO[:, :, 0]) ns = yP.shape[1] k = np.random.randint(ns) fig, ax = plt.subplots(figsize=(12, 4)) dataLst = [yP[:, k] for yP in yPLst]+[yOLst[0][:, k]] labLst = ['CONUS', 'lev0', 'lev1', 'lev2', 'obs'] cLst = 'rmgbk' axplot.plotTS(ax, dm.t, dataLst, styLst='-----', cLst=cLst, legLst=labLst) fig.show() rmse0 = utils.stat.calRmse(yPLst[0][indT:, :], yPLst[3][indT:, :]) rmse1 = utils.stat.calRmse(yPLst[1][indT:, :], yPLst[3][indT:, :]) rmse2 = utils.stat.calRmse(yPLst[2][indT:, :], yPLst[3][indT:, :]) # plot box label1 = caseLst label2 = ['CONUS', 'lev0', 'lev1', ] dataBox = [[rmse0, rmse1, rmse2]] fig = figplot.boxPlot(dataBox, widths=0.5, cLst='brgk', label1=label1, label2=label2, figsize=(6, 4)) fig.show()
mmateu/BookTradingClub
presentation/src/main/java/com/example/BookTradingClub/presentation/controller/BookTradeController.java
<filename>presentation/src/main/java/com/example/BookTradingClub/presentation/controller/BookTradeController.java package com.example.BookTradingClub.presentation.controller; import com.example.BookTradingClub.presentation.dto.RequestCreatedDto; import com.example.BookTradingClub.presentation.dto.RequestDecisionDto; import com.example.BookTradingClub.presentation.dto.RequestToCreateDto; import com.example.BookTradingClub.service.TradeService; import com.example.BookTradingClub.service.domain.ExchangeState; import com.example.BookTradingClub.service.domain.Request; import org.modelmapper.ModelMapper; import org.modelmapper.TypeToken; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; import java.lang.reflect.Type; import java.security.Principal; import java.util.List; @RestController @RequestMapping(value = "/exchanges") public class BookTradeController { @Autowired private TradeService tradeService; @Autowired private ModelMapper mapper; @RequestMapping(method = RequestMethod.POST) RequestCreatedDto makeExchangeRequest(@RequestBody RequestToCreateDto requestToCreateDto, Principal principal){ Request request = mapper.map(requestToCreateDto, Request.class); Request newRequest = tradeService.requestTrade(principal.getName(), request); return mapper.map(newRequest, RequestCreatedDto.class); } @RequestMapping(method = RequestMethod.GET, value = "/pending") List<RequestCreatedDto> getPendingRequests() { List<Request> requests = tradeService.getTradesByState(ExchangeState.PENDING); Type requestsCreatedDtoListType = new TypeToken<List<RequestCreatedDto>>(){}.getType(); return mapper.map(requests, requestsCreatedDtoListType); } @RequestMapping(method = RequestMethod.GET, value = "/accepted") List<RequestCreatedDto> 
getAcceptedRequests() { List<Request> requests = tradeService.getTradesByState(ExchangeState.ACCEPTED); Type requestsCreatedDtoListType = new TypeToken<List<RequestCreatedDto>>(){}.getType(); return mapper.map(requests, requestsCreatedDtoListType); } @RequestMapping(method = RequestMethod.GET, value = "/rejected") List<RequestCreatedDto> getRejectedRequests() { List<Request> requests = tradeService.getTradesByState(ExchangeState.REJECTED); Type requestsCreatedDtoListType = new TypeToken<List<RequestCreatedDto>>(){}.getType(); return mapper.map(requests, requestsCreatedDtoListType); } @RequestMapping(method = RequestMethod.POST, value = "/accept") RequestCreatedDto acceptRequest(Principal principal, @RequestBody final RequestDecisionDto requestDecision) { Request request = mapper.map(requestDecision, Request.class); Request requestAccepted = tradeService.acceptTrade(principal.getName(), request); return mapper.map(requestAccepted, RequestCreatedDto.class); } @RequestMapping(method = RequestMethod.POST, value = "/reject") RequestCreatedDto rejectRequest(Principal principal, @RequestBody final RequestDecisionDto requestDecision) { Request request = mapper.map(requestDecision, Request.class); Request requestRejected = tradeService.rejectTrade(principal.getName(), request); return mapper.map(requestRejected, RequestCreatedDto.class); } }
navikt/fp-formidling
web/src/main/java/no/nav/foreldrepenger/melding/web/server/jetty/db/DatasourceUtil.java
package no.nav.foreldrepenger.melding.web.server.jetty.db; import static no.nav.foreldrepenger.konfig.Cluster.LOCAL; import java.util.Properties; import javax.sql.DataSource; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import io.micrometer.core.instrument.Metrics; import no.nav.foreldrepenger.konfig.Cluster; import no.nav.foreldrepenger.konfig.Environment; import no.nav.vault.jdbc.hikaricp.HikariCPVaultUtil; import no.nav.vault.jdbc.hikaricp.VaultError; public class DatasourceUtil { private static final Environment ENV = Environment.current(); private static final String VAULT_PREPROD_NAVN = "preprod-fss"; public static DataSource createDatasource(String datasourceName, DatasourceRole role, Cluster cluster, int maxPoolSize) { String rolePrefix = getRolePrefix(datasourceName); HikariConfig config = initConnectionPoolConfig(datasourceName, maxPoolSize); if (LOCAL.equals(cluster)) { return createLocalDatasource(config, "public", rolePrefix, ENV.getProperty(datasourceName + ".password")); } return createVaultDatasource(config, mountPath(cluster), getRole(rolePrefix, role)); } private static String mountPath(Cluster cluster) { return "postgresql/" + (cluster.isProd() ? 
cluster.clusterName() : VAULT_PREPROD_NAVN); } private static String getRole(String rolePrefix, DatasourceRole role) { return String.format("%s-%s", rolePrefix, role.name().toLowerCase()); } public static String getDbRole(String datasoureName, DatasourceRole role) { return String.format("%s-%s", getRolePrefix(datasoureName), role.name().toLowerCase()); } private static String getRolePrefix(String datasourceName) { return ENV.getProperty(datasourceName + ".username"); } private static HikariConfig initConnectionPoolConfig(String dataSourceName, int maxPoolSize) { HikariConfig config = new HikariConfig(); config.setJdbcUrl(ENV.getProperty(dataSourceName + ".url")); config.setMinimumIdle(0); config.setMaximumPoolSize(maxPoolSize); config.setIdleTimeout(10001); config.setMaxLifetime(30001); config.setConnectionTestQuery("select 1"); config.setDriverClassName("org.postgresql.Driver"); config.setMetricRegistry(Metrics.globalRegistry); Properties dsProperties = new Properties(); config.setDataSourceProperties(dsProperties); return config; } private static DataSource createVaultDatasource(HikariConfig config, String mountPath, String role) { try { return HikariCPVaultUtil.createHikariDataSourceWithVaultIntegration(config, mountPath, role); } catch (VaultError vaultError) { throw new IllegalStateException("Vault feil ved opprettelse av databaseforbindelse", vaultError); } } private static DataSource createLocalDatasource(HikariConfig config, String schema, String username, String password) { config.setUsername(username); config.setPassword(password); // NOSONAR false positive if (schema != null && !schema.isEmpty()) { config.setSchema(schema); } return new HikariDataSource(config); } }
AhmedAliGokhy/gitlabhq
lib/gitlab/database/migration_helpers.rb
<reponame>AhmedAliGokhy/gitlabhq module Gitlab module Database module MigrationHelpers include Gitlab::Database::ArelMethods BACKGROUND_MIGRATION_BATCH_SIZE = 1000 # Number of rows to process per job BACKGROUND_MIGRATION_JOB_BUFFER_SIZE = 1000 # Number of jobs to bulk queue at a time # Adds `created_at` and `updated_at` columns with timezone information. # # This method is an improved version of Rails' built-in method `add_timestamps`. # # Available options are: # default - The default value for the column. # null - When set to `true` the column will allow NULL values. # The default is to not allow NULL values. def add_timestamps_with_timezone(table_name, options = {}) options[:null] = false if options[:null].nil? [:created_at, :updated_at].each do |column_name| if options[:default] && transaction_open? raise '`add_timestamps_with_timezone` with default value cannot be run inside a transaction. ' \ 'You can disable transactions by calling `disable_ddl_transaction!` ' \ 'in the body of your migration class' end # If default value is presented, use `add_column_with_default` method instead. if options[:default] add_column_with_default( table_name, column_name, :datetime_with_timezone, default: options[:default], allow_null: options[:null] ) else add_column(table_name, column_name, :datetime_with_timezone, options) end end end # Creates a new index, concurrently when supported # # On PostgreSQL this method creates an index concurrently, on MySQL this # creates a regular index. # # Example: # # add_concurrent_index :users, :some_column # # See Rails' `add_index` for more info on the available arguments. def add_concurrent_index(table_name, column_name, options = {}) if transaction_open? raise 'add_concurrent_index can not be run inside a transaction, ' \ 'you can disable transactions by calling disable_ddl_transaction! ' \ 'in the body of your migration class' end if Database.postgresql? 
options = options.merge({ algorithm: :concurrently }) disable_statement_timeout end if index_exists?(table_name, column_name, options) Rails.logger.warn "Index not created because it already exists (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}" return end add_index(table_name, column_name, options) end # Removes an existed index, concurrently when supported # # On PostgreSQL this method removes an index concurrently. # # Example: # # remove_concurrent_index :users, :some_column # # See Rails' `remove_index` for more info on the available arguments. def remove_concurrent_index(table_name, column_name, options = {}) if transaction_open? raise 'remove_concurrent_index can not be run inside a transaction, ' \ 'you can disable transactions by calling disable_ddl_transaction! ' \ 'in the body of your migration class' end if supports_drop_index_concurrently? options = options.merge({ algorithm: :concurrently }) disable_statement_timeout end unless index_exists?(table_name, column_name, options) Rails.logger.warn "Index not removed because it does not exist (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}" return end remove_index(table_name, options.merge({ column: column_name })) end # Removes an existing index, concurrently when supported # # On PostgreSQL this method removes an index concurrently. # # Example: # # remove_concurrent_index :users, "index_X_by_Y" # # See Rails' `remove_index` for more info on the available arguments. def remove_concurrent_index_by_name(table_name, index_name, options = {}) if transaction_open? raise 'remove_concurrent_index_by_name can not be run inside a transaction, ' \ 'you can disable transactions by calling disable_ddl_transaction! ' \ 'in the body of your migration class' end if supports_drop_index_concurrently? 
options = options.merge({ algorithm: :concurrently }) disable_statement_timeout end unless index_exists_by_name?(table_name, index_name) Rails.logger.warn "Index not removed because it does not exist (this may be due to an aborted migration or similar): table_name: #{table_name}, index_name: #{index_name}" return end remove_index(table_name, options.merge({ name: index_name })) end # Only available on Postgresql >= 9.2 def supports_drop_index_concurrently? return false unless Database.postgresql? version = select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i version >= 90200 end # Adds a foreign key with only minimal locking on the tables involved. # # This method only requires minimal locking when using PostgreSQL. When # using MySQL this method will use Rails' default `add_foreign_key`. # # source - The source table containing the foreign key. # target - The target table the key points to. # column - The name of the column to create the foreign key on. # on_delete - The action to perform when associated data is removed, # defaults to "CASCADE". def add_concurrent_foreign_key(source, target, column:, on_delete: :cascade) # Transactions would result in ALTER TABLE locks being held for the # duration of the transaction, defeating the purpose of this method. if transaction_open? raise 'add_concurrent_foreign_key can not be run inside a transaction' end # While MySQL does allow disabling of foreign keys it has no equivalent # of PostgreSQL's "VALIDATE CONSTRAINT". As a result we'll just fall # back to the normal foreign key procedure. if Database.mysql? 
if foreign_key_exists?(source, target, column: column) Rails.logger.warn "Foreign key not created because it exists already " \ "(this may be due to an aborted migration or similar): " \ "source: #{source}, target: #{target}, column: #{column}" return end return add_foreign_key(source, target, column: column, on_delete: on_delete) else on_delete = 'SET NULL' if on_delete == :nullify end disable_statement_timeout key_name = concurrent_foreign_key_name(source, column) unless foreign_key_exists?(source, target, column: column) Rails.logger.warn "Foreign key not created because it exists already " \ "(this may be due to an aborted migration or similar): " \ "source: #{source}, target: #{target}, column: #{column}" # Using NOT VALID allows us to create a key without immediately # validating it. This means we keep the ALTER TABLE lock only for a # short period of time. The key _is_ enforced for any newly created # data. execute <<-EOF.strip_heredoc ALTER TABLE #{source} ADD CONSTRAINT #{key_name} FOREIGN KEY (#{column}) REFERENCES #{target} (id) #{on_delete ? "ON DELETE #{on_delete.upcase}" : ''} NOT VALID; EOF end # Validate the existing constraint. This can potentially take a very # long time to complete, but fortunately does not lock the source table # while running. # # Note this is a no-op in case the constraint is VALID already execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{key_name};") end def foreign_key_exists?(source, target = nil, column: nil) foreign_keys(source).any? do |key| if column key.options[:column].to_s == column.to_s else key.to_table.to_s == target.to_s end end end # Returns the name for a concurrent foreign key. # # PostgreSQL constraint names have a limit of 63 bytes. The logic used # here is based on Rails' foreign_key_name() method, which unfortunately # is private so we can't rely on it directly. 
def concurrent_foreign_key_name(table, column) "fk_#{Digest::SHA256.hexdigest("#{table}_#{column}_fk").first(10)}" end # Long-running migrations may take more than the timeout allowed by # the database. Disable the session's statement timeout to ensure # migrations don't get killed prematurely. (PostgreSQL only) def disable_statement_timeout execute('SET statement_timeout TO 0') if Database.postgresql? end def true_value Database.true_value end def false_value Database.false_value end # Updates the value of a column in batches. # # This method updates the table in batches of 5% of the total row count. # This method will continue updating rows until no rows remain. # # When given a block this method will yield two values to the block: # # 1. An instance of `Arel::Table` for the table that is being updated. # 2. The query to run as an Arel object. # # By supplying a block one can add extra conditions to the queries being # executed. Note that the same block is used for _all_ queries. # # Example: # # update_column_in_batches(:projects, :foo, 10) do |table, query| # query.where(table[:some_column].eq('hello')) # end # # This would result in this method updating only rows where # `projects.some_column` equals "hello". # # table - The name of the table. # column - The name of the column to update. # value - The value for the column. # # The `value` argument is typically a literal. To perform a computed # update, an Arel literal can be used instead: # # update_value = Arel.sql('bar * baz') # # update_column_in_batches(:projects, :foo, update_value) do |table, query| # query.where(table[:some_column].eq('hello')) # end # # Rubocop's Metrics/AbcSize metric is disabled for this method as Rubocop # determines this method to be too complex while there's no way to make it # less "complex" without introducing extra methods (which actually will # make things _more_ complex). 
#
# rubocop: disable Metrics/AbcSize
def update_column_in_batches(table, column, value)
  # Each batch must commit independently so row locks are held briefly;
  # a wrapping transaction would defeat that.
  if transaction_open?
    raise 'update_column_in_batches can not be run inside a transaction, ' \
      'you can disable transactions by calling disable_ddl_transaction! ' \
      'in the body of your migration class'
  end

  table = Arel::Table.new(table)

  count_arel = table.project(Arel.star.count.as('count'))
  count_arel = yield table, count_arel if block_given?

  total = exec_query(count_arel.to_sql).to_hash.first['count'].to_i

  return if total == 0

  # Update in batches of 5% until we run out of any rows to update.
  batch_size = ((total / 100.0) * 5.0).ceil
  max_size = 1000

  # The upper limit is 1000 to ensure we don't lock too many rows. For
  # example, for "merge_requests" even 1% of the table is around 35 000
  # rows for GitLab.com.
  batch_size = max_size if batch_size > max_size

  # Lowest id in scope; batches walk the id range upwards from here.
  start_arel = table.project(table[:id]).order(table[:id].asc).take(1)
  start_arel = yield table, start_arel if block_given?
  start_id = exec_query(start_arel.to_sql).to_hash.first['id'].to_i

  loop do
    # The "stop row" is the first row *after* the current batch; its id is
    # the exclusive upper bound for this batch's UPDATE. A nil stop row
    # means this is the final batch.
    stop_arel = table.project(table[:id])
      .where(table[:id].gteq(start_id))
      .order(table[:id].asc)
      .take(1)
      .skip(batch_size)

    stop_arel = yield table, stop_arel if block_given?
    stop_row = exec_query(stop_arel.to_sql).to_hash.first

    update_arel = arel_update_manager
      .table(table)
      .set([[table[column], value]])
      .where(table[:id].gteq(start_id))

    if stop_row
      stop_id = stop_row['id'].to_i
      start_id = stop_id
      update_arel = update_arel.where(table[:id].lt(stop_id))
    end

    update_arel = yield table, update_arel if block_given?

    execute(update_arel.to_sql)

    # There are no more rows left to update.
    break unless stop_row
  end
end

# Adds a column with a default value without locking an entire table.
#
# This method runs the following steps:
#
# 1. Add the column with a default value of NULL.
# 2. Change the default value of the column to the specified value.
# 3. Update all existing rows in batches.
# 4. Set a `NOT NULL` constraint on the column if desired (the default).
#
# These steps ensure a column can be added to a large and commonly used
# table without locking the entire table for the duration of the table
# modification.
#
# table - The name of the table to update.
# column - The name of the column to add.
# type - The column type (e.g. `:integer`).
# default - The default value for the column.
# limit - Sets a column limit. For example, for :integer, the default is
#         4-bytes. Set `limit: 8` to allow 8-byte integers.
# allow_null - When set to `true` the column will allow NULL values, the
#              default is to not allow NULL values.
#
# This method can also take a block which is passed directly to the
# `update_column_in_batches` method.
def add_column_with_default(table, column, type, default:, limit: nil, allow_null: false, &block)
  if transaction_open?
    raise 'add_column_with_default can not be run inside a transaction, ' \
      'you can disable transactions by calling disable_ddl_transaction! ' \
      'in the body of your migration class'
  end

  disable_statement_timeout

  transaction do
    if limit
      add_column(table, column, type, default: nil, limit: limit)
    else
      add_column(table, column, type, default: nil)
    end

    # Changing the default before the update ensures any newly inserted
    # rows already use the proper default value.
    change_column_default(table, column, default)
  end

  begin
    update_column_in_batches(table, column, default, &block)

    change_column_null(table, column, false) unless allow_null
  # We want to rescue _all_ exceptions here, even those that don't inherit
  # from StandardError, so a half-added column never survives a failure.
  rescue Exception => error # rubocop: disable all
    remove_column(table, column)

    raise error
  end
end

# Renames a column without requiring downtime.
#
# Concurrent renames work by using database triggers to ensure both the
# old and new column are in sync.
# However, this method will _not_ remove
# the triggers or the old column automatically; this needs to be done
# manually in a post-deployment migration. This can be done using the
# method `cleanup_concurrent_column_rename`.
#
# table - The name of the database table containing the column.
# old - The old column name.
# new - The new column name.
# type - The type of the new column. If no type is given the old column's
#        type is used.
def rename_column_concurrently(table, old, new, type: nil)
  if transaction_open?
    raise 'rename_column_concurrently can not be run inside a transaction'
  end

  check_trigger_permissions!(table)

  old_col = column_for(table, old)
  new_type = type || old_col.type

  add_column(table, new, new_type,
             limit: old_col.limit,
             precision: old_col.precision,
             scale: old_col.scale)

  # We set the default value _after_ adding the column so we don't end up
  # updating any existing data with the default value. This isn't
  # necessary since we copy over old values further down.
  change_column_default(table, new, old_col.default) if old_col.default

  install_rename_triggers(table, old, new)

  # Copy existing values from the old column into the new one.
  update_column_in_batches(table, new, Arel::Table.new(table)[old])

  change_column_null(table, new, false) unless old_col.null

  copy_indexes(table, old, new)
  copy_foreign_keys(table, old, new)
end

# Installs triggers in a table that keep a new column in sync with an old
# one.
#
# table - The name of the table to install the trigger in.
# old_column - The name of the old column.
# new_column - The name of the new column.
def install_rename_triggers(table, old_column, new_column)
  trigger_name = rename_trigger_name(table, old_column, new_column)
  quoted_table = quote_table_name(table)
  quoted_old = quote_column_name(old_column)
  quoted_new = quote_column_name(new_column)

  if Database.postgresql?
    install_rename_triggers_for_postgresql(trigger_name, quoted_table,
                                           quoted_old, quoted_new)
  else
    install_rename_triggers_for_mysql(trigger_name, quoted_table,
                                      quoted_old, quoted_new)
  end
end

# Changes the type of a column concurrently.
#
# table - The table containing the column.
# column - The name of the column to change.
# new_type - The new column type.
def change_column_type_concurrently(table, column, new_type)
  temp_column = "#{column}_for_type_change"

  # Reuses the concurrent-rename machinery: the "rename" target is a
  # temporary column with the new type.
  rename_column_concurrently(table, column, temp_column, type: new_type)
end

# Performs cleanup of a concurrent type change.
#
# table - The table containing the column.
# column - The name of the column to change.
# new_type - The new column type.
def cleanup_concurrent_column_type_change(table, column)
  temp_column = "#{column}_for_type_change"

  transaction do
    # This has to be performed in a transaction as otherwise we might have
    # inconsistent data.
    cleanup_concurrent_column_rename(table, column, temp_column)
    rename_column(table, temp_column, column)
  end
end

# Cleans up a concurrent column name.
#
# This method takes care of removing previously installed triggers as well
# as removing the old column.
#
# table - The name of the database table.
# old - The name of the old column.
# new - The name of the new column.
def cleanup_concurrent_column_rename(table, old, new)
  trigger_name = rename_trigger_name(table, old, new)

  check_trigger_permissions!(table)

  if Database.postgresql?
    remove_rename_triggers_for_postgresql(table, trigger_name)
  else
    remove_rename_triggers_for_mysql(trigger_name)
  end

  remove_column(table, old)
end

# Changes the column type of a table using a background migration.
#
# Because this method uses a background migration it's more suitable for
# large tables. For small tables it's better to use
# `change_column_type_concurrently` since it can complete its work in a
# much shorter amount of time and doesn't rely on Sidekiq.
#
# Example usage:
#
#     class Issue < ActiveRecord::Base
#       self.table_name = 'issues'
#
#       include EachBatch
#
#       def self.to_migrate
#         where('closed_at IS NOT NULL')
#       end
#     end
#
#     change_column_type_using_background_migration(
#       Issue.to_migrate,
#       :closed_at,
#       :datetime_with_timezone
#     )
#
# Reverting a migration like this is done exactly the same way, just with
# a different type to migrate to (e.g. `:datetime` in the above example).
#
# relation - An ActiveRecord relation to use for scheduling jobs and
#            figuring out what table we're modifying. This relation _must_
#            have the EachBatch module included.
#
# column - The name of the column for which the type will be changed.
#
# new_type - The new type of the column.
#
# batch_size - The number of rows to schedule in a single background
#              migration.
#
# interval - The time interval between every background migration.
def change_column_type_using_background_migration(
  relation,
  column,
  new_type,
  batch_size: 10_000,
  interval: 10.minutes
)
  unless relation.model < EachBatch
    raise TypeError, 'The relation must include the EachBatch module'
  end

  temp_column = "#{column}_for_type_change"
  table = relation.table_name
  max_index = 0

  add_column(table, temp_column, new_type)
  install_rename_triggers(table, column, temp_column)

  # Schedule the jobs that will copy the data from the old column to the
  # new one. Rows with NULL values in our source column are skipped since
  # the target column is already NULL at this point.
  relation.where.not(column => nil).each_batch(of: batch_size) do |batch, index|
    start_id, end_id = batch.pluck('MIN(id), MAX(id)').first
    max_index = index

    BackgroundMigrationWorker.perform_in(
      index * interval,
      'CopyColumn',
      [table, column, temp_column, start_id, end_id]
    )
  end

  # Schedule the renaming of the column to happen (initially) 1 hour after
  # the last batch finished.
  BackgroundMigrationWorker.perform_in(
    (max_index * interval) + 1.hour,
    'CleanupConcurrentTypeChange',
    [table, column, temp_column]
  )

  if perform_background_migration_inline?
    # To ensure the schema is up to date immediately we perform the
    # migration inline in dev / test environments.
    Gitlab::BackgroundMigration.steal('CopyColumn')
    Gitlab::BackgroundMigration.steal('CleanupConcurrentTypeChange')
  end
end

# Renames a column using a background migration.
#
# Because this method uses a background migration it's more suitable for
# large tables. For small tables it's better to use
# `rename_column_concurrently` since it can complete its work in a much
# shorter amount of time and doesn't rely on Sidekiq.
#
# Example usage:
#
#     rename_column_using_background_migration(
#       :users,
#       :feed_token,
#       :rss_token
#     )
#
# table - The name of the database table containing the column.
#
# old - The old column name.
#
# new - The new column name.
#
# type - The type of the new column. If no type is given the old column's
#        type is used.
#
# batch_size - The number of rows to schedule in a single background
#              migration.
#
# interval - The time interval between every background migration.
def rename_column_using_background_migration(
  table,
  old_column,
  new_column,
  type: nil,
  batch_size: 10_000,
  interval: 10.minutes
)
  check_trigger_permissions!(table)

  old_col = column_for(table, old_column)
  new_type = type || old_col.type
  max_index = 0

  add_column(table, new_column, new_type,
             limit: old_col.limit,
             precision: old_col.precision,
             scale: old_col.scale)

  # We set the default value _after_ adding the column so we don't end up
  # updating any existing data with the default value. This isn't
  # necessary since we copy over old values further down.
  change_column_default(table, new_column, old_col.default) if old_col.default

  install_rename_triggers(table, old_column, new_column)

  # Anonymous model so EachBatch can iterate the table without depending
  # on an application model class.
  model = Class.new(ActiveRecord::Base) do
    self.table_name = table

    include ::EachBatch
  end

  # Schedule the jobs that will copy the data from the old column to the
  # new one. Rows with NULL values in our source column are skipped since
  # the target column is already NULL at this point.
  model.where.not(old_column => nil).each_batch(of: batch_size) do |batch, index|
    start_id, end_id = batch.pluck('MIN(id), MAX(id)').first
    max_index = index

    BackgroundMigrationWorker.perform_in(
      index * interval,
      'CopyColumn',
      [table, old_column, new_column, start_id, end_id]
    )
  end

  # Schedule the renaming of the column to happen (initially) 1 hour after
  # the last batch finished.
  BackgroundMigrationWorker.perform_in(
    (max_index * interval) + 1.hour,
    'CleanupConcurrentRename',
    [table, old_column, new_column]
  )

  if perform_background_migration_inline?
    # To ensure the schema is up to date immediately we perform the
    # migration inline in dev / test environments.
    Gitlab::BackgroundMigration.steal('CopyColumn')
    Gitlab::BackgroundMigration.steal('CleanupConcurrentRename')
  end
end

# True in dev / test, where background migrations must run synchronously
# so the schema is immediately up to date.
def perform_background_migration_inline?
  Rails.env.test? || Rails.env.development?
end

# Performs a concurrent column rename when using PostgreSQL.
def install_rename_triggers_for_postgresql(trigger, table, old, new)
  execute <<-EOF.strip_heredoc
  CREATE OR REPLACE FUNCTION #{trigger}()
  RETURNS trigger AS
  $BODY$
  BEGIN
    NEW.#{new} := NEW.#{old};
    RETURN NEW;
  END;
  $BODY$
  LANGUAGE 'plpgsql'
  VOLATILE
  EOF

  execute <<-EOF.strip_heredoc
  CREATE TRIGGER #{trigger}
  BEFORE INSERT OR UPDATE
  ON #{table}
  FOR EACH ROW
  EXECUTE PROCEDURE #{trigger}()
  EOF
end

# Installs the triggers necessary to perform a concurrent column rename on
# MySQL.
def install_rename_triggers_for_mysql(trigger, table, old, new)
  # MySQL has no trigger functions, so two separate triggers are needed:
  # one for INSERT and one for UPDATE.
  execute <<-EOF.strip_heredoc
  CREATE TRIGGER #{trigger}_insert
  BEFORE INSERT
  ON #{table}
  FOR EACH ROW
  SET NEW.#{new} = NEW.#{old}
  EOF

  execute <<-EOF.strip_heredoc
  CREATE TRIGGER #{trigger}_update
  BEFORE UPDATE
  ON #{table}
  FOR EACH ROW
  SET NEW.#{new} = NEW.#{old}
  EOF
end

# Removes the triggers used for renaming a PostgreSQL column concurrently.
def remove_rename_triggers_for_postgresql(table, trigger)
  execute("DROP TRIGGER IF EXISTS #{trigger} ON #{table}")
  execute("DROP FUNCTION IF EXISTS #{trigger}()")
end

# Removes the triggers used for renaming a MySQL column concurrently.
def remove_rename_triggers_for_mysql(trigger)
  execute("DROP TRIGGER IF EXISTS #{trigger}_insert")
  execute("DROP TRIGGER IF EXISTS #{trigger}_update")
end

# Returns the (base) name to use for triggers when renaming columns.
def rename_trigger_name(table, old, new)
  'trigger_' + Digest::SHA256.hexdigest("#{table}_#{old}_#{new}").first(12)
end

# Returns an Array containing the indexes for the given column
def indexes_for(table, column)
  column = column.to_s

  indexes(table).select { |index| index.columns.include?(column) }
end

# Returns an Array containing the foreign keys for the given column.
def foreign_keys_for(table, column)
  column = column.to_s

  foreign_keys(table).select { |fk| fk.column == column }
end

# Copies all indexes for the old column to a new column.
#
# table - The table containing the columns and indexes.
# old - The old column.
# new - The new column.
def copy_indexes(table, old, new)
  old = old.to_s
  new = new.to_s

  indexes_for(table, old).each do |index|
    new_columns = index.columns.map do |column|
      column == old ? new : column
    end

    # This is necessary as we can't properly rename indexes such as
    # "ci_taggings_idx".
    unless index.name.include?(old)
      raise "The index #{index.name} can not be copied as it does not "\
        "mention the old column. You have to rename this index manually first."
    end

    name = index.name.gsub(old, new)

    options = {
      unique: index.unique,
      name: name,
      length: index.lengths,
      order: index.orders
    }

    # These options are not supported by MySQL, so we only add them if
    # they were previously set.
    options[:using] = index.using if index.using
    options[:where] = index.where if index.where

    unless index.opclasses.blank?
      opclasses = index.opclasses.dup

      # Copy the operator classes for the old column (if any) to the new
      # column.
      opclasses[new] = opclasses.delete(old) if opclasses[old]

      options[:opclasses] = opclasses
    end

    add_concurrent_index(table, new_columns, options)
  end
end

# Copies all foreign keys for the old column to the new column.
#
# table - The table containing the columns and indexes.
# old - The old column.
# new - The new column.
def copy_foreign_keys(table, old, new)
  foreign_keys_for(table, old).each do |fk|
    add_concurrent_foreign_key(fk.from_table,
                               fk.to_table,
                               column: new,
                               on_delete: fk.on_delete)
  end
end

# Returns the column for the given table and column name.
def column_for(table, name)
  name = name.to_s

  columns(table).find { |column| column.name == name }
end

# This will replace the first occurance of a string in a column with
# the replacement
# On postgresql we can use `regexp_replace` for that.
# On mysql we find the location of the pattern, and overwrite it
# with the replacement
def replace_sql(column, pattern, replacement)
  quoted_pattern = Arel::Nodes::Quoted.new(pattern.to_s)
  quoted_replacement = Arel::Nodes::Quoted.new(replacement.to_s)

  if Database.mysql?
    # MySQL: INSERT(col, LOCATE(pattern, col), LENGTH(pattern), replacement)
    # overwrites the pattern in place.
    locate = Arel::Nodes::NamedFunction
      .new('locate', [quoted_pattern, column])
    insert_in_place = Arel::Nodes::NamedFunction
      .new('insert', [column, locate, pattern.size, quoted_replacement])

    Arel::Nodes::SqlLiteral.new(insert_in_place.to_sql)
  else
    replace = Arel::Nodes::NamedFunction
      .new("regexp_replace", [column, quoted_pattern, quoted_replacement])
    Arel::Nodes::SqlLiteral.new(replace.to_sql)
  end
end

# Like remove_foreign_key, but silently ignores a missing key
# (remove_foreign_key raises ArgumentError in that case).
def remove_foreign_key_without_error(*args)
  remove_foreign_key(*args)
rescue ArgumentError
end

# Drains one Sidekiq queue into another, one job at a time.
def sidekiq_queue_migrate(queue_from, to:)
  while sidekiq_queue_length(queue_from) > 0
    Sidekiq.redis do |conn|
      conn.rpoplpush "queue:#{queue_from}", "queue:#{to}"
    end
  end
end

def sidekiq_queue_length(queue_name)
  Sidekiq.redis do |conn|
    conn.llen("queue:#{queue_name}")
  end
end

# Raises with setup instructions when the database user lacks the
# privileges needed to create/drop/execute triggers on `table`.
def check_trigger_permissions!(table)
  unless Grant.create_and_execute_trigger?(table)
    dbname = Database.database_name
    user = Database.username

    raise <<-EOF
Your database user is not allowed to create, drop, or execute triggers on the
table #{table}.

If you are using PostgreSQL you can solve this by logging in to the GitLab
database (#{dbname}) using a super user and running:

    ALTER #{user} WITH SUPERUSER

For MySQL you instead need to run:

    GRANT ALL PRIVILEGES ON *.* TO #{user}@'%'

Both queries will grant the user super user permissions, ensuring you don't run
into similar problems in the future (e.g. when new tables are created).
    EOF
  end
end

# Bulk queues background migration jobs for an entire table, batched by ID range.
# "Bulk" meaning many jobs will be pushed at a time for efficiency.
# If you need a delay interval per job, then use `queue_background_migration_jobs_by_range_at_intervals`.
# # model_class - The table being iterated over # job_class_name - The background migration job class as a string # batch_size - The maximum number of rows per job # # Example: # # class Route < ActiveRecord::Base # include EachBatch # self.table_name = 'routes' # end # # bulk_queue_background_migration_jobs_by_range(Route, 'ProcessRoutes') # # Where the model_class includes EachBatch, and the background migration exists: # # class Gitlab::BackgroundMigration::ProcessRoutes # def perform(start_id, end_id) # # do something # end # end def bulk_queue_background_migration_jobs_by_range(model_class, job_class_name, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE) raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id') jobs = [] model_class.each_batch(of: batch_size) do |relation| start_id, end_id = relation.pluck('MIN(id), MAX(id)').first if jobs.length >= BACKGROUND_MIGRATION_JOB_BUFFER_SIZE # Note: This code path generally only helps with many millions of rows # We push multiple jobs at a time to reduce the time spent in # Sidekiq/Redis operations. We're using this buffer based approach so we # don't need to run additional queries for every range. BackgroundMigrationWorker.bulk_perform_async(jobs) jobs.clear end jobs << [job_class_name, [start_id, end_id]] end BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty? end # Queues background migration jobs for an entire table, batched by ID range. # Each job is scheduled with a `delay_interval` in between. # If you use a small interval, then some jobs may run at the same time. 
# # model_class - The table or relation being iterated over # job_class_name - The background migration job class as a string # delay_interval - The duration between each job's scheduled time (must respond to `to_f`) # batch_size - The maximum number of rows per job # # Example: # # class Route < ActiveRecord::Base # include EachBatch # self.table_name = 'routes' # end # # queue_background_migration_jobs_by_range_at_intervals(Route, 'ProcessRoutes', 1.minute) # # Where the model_class includes EachBatch, and the background migration exists: # # class Gitlab::BackgroundMigration::ProcessRoutes # def perform(start_id, end_id) # # do something # end # end def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE) raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id') # To not overload the worker too much we enforce a minimum interval both # when scheduling and performing jobs. if delay_interval < BackgroundMigrationWorker::MIN_INTERVAL delay_interval = BackgroundMigrationWorker::MIN_INTERVAL end model_class.each_batch(of: batch_size) do |relation, index| start_id, end_id = relation.pluck('MIN(id), MAX(id)').first # `BackgroundMigrationWorker.bulk_perform_in` schedules all jobs for # the same time, which is not helpful in most cases where we wish to # spread the work over time. BackgroundMigrationWorker.perform_in(delay_interval * index, job_class_name, [start_id, end_id]) end end # Fetches indexes on a column by name for postgres. # # This will include indexes using an expression on the column, for example: # `CREATE INDEX CONCURRENTLY index_name ON table (LOWER(column));` # # For mysql, it falls back to the default ActiveRecord implementation that # will not find custom indexes. But it will select by name without passing # a column. 
# # We can remove this when upgrading to Rails 5 with an updated `index_exists?`: # - https://github.com/rails/rails/commit/edc2b7718725016e988089b5fb6d6fb9d6e16882 # # Or this can be removed when we no longer support postgres < 9.5, so we # can use `CREATE INDEX IF NOT EXISTS`. def index_exists_by_name?(table, index) # We can't fall back to the normal `index_exists?` method because that # does not find indexes without passing a column name. if indexes(table).map(&:name).include?(index.to_s) true elsif Gitlab::Database.postgresql? postgres_exists_by_name?(table, index) else false end end def postgres_exists_by_name?(table, name) index_sql = <<~SQL SELECT COUNT(*) FROM pg_index JOIN pg_class i ON (indexrelid=i.oid) JOIN pg_class t ON (indrelid=t.oid) WHERE i.relname = '#{name}' AND t.relname = '#{table}' SQL connection.select_value(index_sql).to_i > 0 end end end end
FrankSynth/MiniFrankInterface
interface/interfaceOut.hpp
#pragma once #include "interfaceMiddleman.hpp" #include <Arduino.h> #include <SPI.h> #define DAC1 8 #define DAC2 6 #define TRIGGER1 4 #define TRIGGER2 5 #define GATE1 2 #define GATE2 3 #define CLK1 0 #define CLK2 1 #define CLKLED 21 #define NOTESCALING 46.54f void setVoltage(int dacpin, bool channel, bool gain, unsigned int mV); // channel 0, 1, Gain = 2 (default) void initOutput(); class Channel { byte outputChannel; byte noteDac; byte noteDacChannel; byte cvDac; byte cvDacChannel; byte triggerPin; byte gatePin; public: Channel(byte outputChannel, byte noteDac, byte noteDacChannel, byte cvDac, byte cvDacChannel, byte triggerPin, byte gatePin) : outputChannel(outputChannel), noteDac(noteDac), noteDacChannel(noteDacChannel), cvDac(cvDac), cvDacChannel(cvDacChannel), triggerPin(triggerPin), gatePin(gatePin) {} void setTuning(float tuning); void setGate(byte state); void setTrigger(byte state); void setNote(byte note); void setCV(int value); }; class clock { public: clock(byte pin) : pin(pin) {} void setClock(byte state); private: byte pin; }; class ClkLed { public: void init(byte pin); void setClkLed(byte state); private: byte clkLed; };
BartlomiejSzczotka/football-events
football-common/src/main/java/org/djar/football/stream/KafkaStreamsStarter.java
package org.djar.football.stream; import static org.djar.football.util.Topics.TOPIC_NAME_PREFIX; import java.util.Properties; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.clients.admin.KafkaAdminClient; import org.apache.kafka.common.errors.RetriableException; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.Topology; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class KafkaStreamsStarter { private static final Logger logger = LoggerFactory.getLogger(KafkaStreamsStarter.class); // the number of football topics - docker-compose.yml/kafka/KAFKA_CREATE_TOPICS private static final int FB_TOPIC_COUNT = 12; private final String kafkaBootstrapAddress; private final Topology topology; private final String applicationId; private long kafkaTimeout = 120000; private long streamsStartupTimeout = 20000; public KafkaStreamsStarter(String kafkaBootstrapAddress, Topology topology, String applicationId) { this.kafkaBootstrapAddress = kafkaBootstrapAddress; this.topology = topology; this.applicationId = applicationId; } public void setKafkaTimeout(long kafkaTimeout) { this.kafkaTimeout = kafkaTimeout; } public void setStreamsStartupTimeout(long streamsStartupTimeout) { this.streamsStartupTimeout = streamsStartupTimeout; } public KafkaStreams start() { Properties props = new Properties(); props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapAddress); props.put(StreamsConfig.CLIENT_ID_CONFIG, applicationId); props.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 0); props.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId); props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, "exactly_once"); props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 
1); //commit asap final KafkaStreams kafkaStreams = new KafkaStreams(topology, props); Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close)); kafkaStreams.setUncaughtExceptionHandler((thread, exception) -> logger.error(thread.toString(), exception)); // wait for Kafka and football topics creation to avoid endless REBALANCING problem waitForKafkaAndTopics(); startStreams(kafkaStreams); logger.debug("Started Kafka Streams, Kafka bootstrap: {}", kafkaBootstrapAddress); return kafkaStreams; } private void waitForKafkaAndTopics() { Properties properties = new Properties(); properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapAddress); properties.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 2000); long timeout = System.currentTimeMillis() + kafkaTimeout; // football topics are created at Kafka startup // wait until all of them are created try (AdminClient client = KafkaAdminClient.create(properties)) { while (true) { Set<String> topicNames = null; try { topicNames = client.listTopics().names().get(); if (containsFootballTopics(topicNames)) { logger.trace("Required topics exist: {}", topicNames); break; } } catch (ExecutionException e) { // ignore retriable errors, especially timeouts if (!(e.getCause() instanceof RetriableException)) { throw new RuntimeException("Kafka connection error " + kafkaBootstrapAddress, e); } logger.trace("Trying to connect to Kafka {}", e.getMessage()); } checkTimeout(kafkaBootstrapAddress, timeout, topicNames); Thread.sleep(2000); } } catch (InterruptedException e) { Thread.currentThread().interrupted(); } } private void checkTimeout(String kafkaBootstrapAddress, long timeout, Set<String> topicNames) { if (System.currentTimeMillis() > timeout) { if (topicNames == null || topicNames.isEmpty()) { throw new RuntimeException("Timeout waiting for Kafka. Kafka is not available " + kafkaBootstrapAddress); } throw new RuntimeException("Timeout waiting for Kafka. 
" + "Some '" + TOPIC_NAME_PREFIX + "*' topics are missing, found only: " + topicNames); } } private boolean containsFootballTopics(Set<String> topicNames) { return topicNames.stream().filter(name -> name.startsWith(TOPIC_NAME_PREFIX)).count() == FB_TOPIC_COUNT; } private void startStreams(KafkaStreams kafkaStreams) { CountDownLatch streamsStartedLatch = new CountDownLatch(1); // wait for consistent state kafkaStreams.setStateListener((newState, oldState) -> { logger.trace("Kafka Streams state has been changed from {} to {}", oldState, newState); if (oldState == KafkaStreams.State.REBALANCING && newState == KafkaStreams.State.RUNNING) { streamsStartedLatch.countDown(); } }); kafkaStreams.cleanUp(); kafkaStreams.start(); long timeout = System.currentTimeMillis() + streamsStartupTimeout; try { streamsStartedLatch.await(timeout - System.currentTimeMillis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupted(); } KafkaStreams.State state = kafkaStreams.state(); if (state != KafkaStreams.State.RUNNING) { logger.error("Unable to start Kafka Streams in {} ms, the current state is {}", streamsStartupTimeout, state); System.exit(1); } } }
jduckett/duck_map
test/unit/config/reset_test.rb
require 'test_helper'

# Verifies DuckMap::Config.reset: full resets, selective resets
# (:sitemap_attributes_hash vs :attributes), and that class-level resets do not
# leak into already-instantiated object attribute hashes.
class ResetTest < ActiveSupport::TestCase

  ##################################################################################
  test "DuckMap::Config.sitemap_attributes_hash should NEVER return nil" do
    # even after directly setting sitemap_attributes_hash to nil, it should
    # NEVER return nil
    DuckMap::Config.sitemap_attributes_hash = nil
    assert DuckMap::Config.sitemap_attributes_hash.kind_of?(Hash)
  end

  ##################################################################################
  test "DuckMap::Config.sitemap_attributes_hash should NEVER return nil using reset" do
    DuckMap::Config.reset
    assert DuckMap::Config.sitemap_attributes_hash.kind_of?(Hash)
  end

  ##################################################################################
  test "DuckMap::Config.reset(:sitemap_attributes_hash) should leave other values in tact" do
    assert !DuckMap::Config.sitemap_attributes_defined.kind_of?(NilClass)

    # go ahead and reset everything
    DuckMap::Config.reset

    # confirm default values
    assert DuckMap::Config.sitemap_attributes_hash[:default][:title].eql?(:title)
    assert DuckMap::Config.sitemap_attributes_hash[:index][:title].eql?(:title)
    assert DuckMap::Config.title.eql?("Untitled")
    assert DuckMap::Config.sitemap_attributes_defined.kind_of?(FalseClass)

    # set and confirm new values
    DuckMap::Config.title = "my title"
    assert DuckMap::Config.title.eql?("my title")

    DuckMap::Config.sitemap_attributes_defined = true
    assert DuckMap::Config.sitemap_attributes_defined.kind_of?(TrueClass)

    DuckMap::Config.sitemap_attributes_hash[:default][:title] = "default title"
    DuckMap::Config.sitemap_attributes_hash[:index][:title] = "index title"
    assert DuckMap::Config.sitemap_attributes_hash[:default][:title].eql?("default title")
    assert DuckMap::Config.sitemap_attributes_hash[:index][:title].eql?("index title")

    # reset sitemap_attributes_hash ONLY and leave others intact
    DuckMap::Config.reset(:sitemap_attributes_hash)

    # the hash reverted to defaults; title and sitemap_attributes_defined survived
    assert DuckMap::Config.sitemap_attributes_hash[:default][:title].eql?(:title)
    assert DuckMap::Config.sitemap_attributes_hash[:index][:title].eql?(:title)
    assert DuckMap::Config.title.eql?("my title")
    assert DuckMap::Config.sitemap_attributes_defined.kind_of?(TrueClass)
  end

  ##################################################################################
  test "DuckMap::Config.reset(:attributes) should leave other values in tact" do
    assert !DuckMap::Config.sitemap_attributes_defined.kind_of?(NilClass)

    # go ahead and reset everything
    DuckMap::Config.reset

    # confirm default values
    assert DuckMap::Config.sitemap_attributes_hash[:default][:title].eql?(:title)
    assert DuckMap::Config.sitemap_attributes_hash[:index][:title].eql?(:title)
    assert DuckMap::Config.title.eql?("Untitled")
    assert DuckMap::Config.keywords.kind_of?(NilClass)
    assert DuckMap::Config.sitemap_attributes_defined.kind_of?(FalseClass)

    # set and confirm new values
    DuckMap::Config.title = "my title"
    assert DuckMap::Config.title.eql?("my title")

    DuckMap::Config.keywords = "moe larry the cheese"
    assert DuckMap::Config.keywords.eql?("moe larry the cheese")

    DuckMap::Config.sitemap_attributes_defined = true
    assert DuckMap::Config.sitemap_attributes_defined.kind_of?(TrueClass)

    DuckMap::Config.sitemap_attributes_hash[:default][:title] = "default title"
    DuckMap::Config.sitemap_attributes_hash[:index][:title] = "index title"
    assert DuckMap::Config.sitemap_attributes_hash[:default][:title].eql?("default title")
    assert DuckMap::Config.sitemap_attributes_hash[:index][:title].eql?("index title")

    # reset the config attributes ONLY (title, keywords, ...) and leave
    # sitemap_attributes_hash intact (the copy-pasted comment here used to
    # say the opposite)
    DuckMap::Config.reset(:attributes)

    assert DuckMap::Config.sitemap_attributes_hash[:default][:title].eql?("default title")
    assert DuckMap::Config.sitemap_attributes_hash[:index][:title].eql?("index title")
    assert DuckMap::Config.title.eql?("Untitled")
    assert DuckMap::Config.keywords.kind_of?(NilClass)
    assert DuckMap::Config.sitemap_attributes_defined.kind_of?(TrueClass)
  end

  ##################################################################################
  test "DuckMap::Config.reset should not affect object instances" do
    # reset here in case other tests bigfoot the class variables.
    DuckMap::Config.reset

    # define a couple of objects
    class ResetObject01
      include DuckMap::InheritableClassAttributes
      include DuckMap::Attributes
    end

    class ResetObject02
      include DuckMap::InheritableClassAttributes
      include DuckMap::Attributes
    end

    # reset again just to satisfy being anal
    DuckMap::Config.reset

    # class method should always return nil until instantiated
    # by an instance of the object
    assert ResetObject01.sitemap_attributes_hash.kind_of?(NilClass)
    assert ResetObject02.sitemap_attributes_hash.kind_of?(NilClass)

    # create two separate objects
    a = ResetObject01.new
    b = ResetObject02.new

    # instantiate attributes on both objects
    assert a.sitemap_attributes.kind_of?(Hash)
    assert b.sitemap_attributes.kind_of?(Hash)

    # set some values
    a.sitemap_attributes(:default)[:title] = "default title"
    b.sitemap_attributes(:default)[:title] = "default title"
    assert a.sitemap_attributes(:default)[:title].eql?("default title")
    assert b.sitemap_attributes(:default)[:title].eql?("default title")

    # reset config
    DuckMap::Config.reset

    # confirm object instances have not been affected
    assert a.sitemap_attributes(:default)[:title].eql?("default title")
    assert b.sitemap_attributes(:default)[:title].eql?("default title")
  end

end
xmangogogo1/Demo-dish-order-system
src/main/java/cmpe/dos/service/impl/ReceiveOrderServiceImpl.java
package cmpe.dos.service.impl;

import cmpe.dos.dao.HibernateDao;
import cmpe.dos.entity.Order;
import cmpe.dos.service.ReceiveOrderService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Service for order receipt: stamps an order's pickOrDeliveryTime when it is
 * confirmed as picked up / delivered, and lists orders not yet received.
 *
 * Cleanup: removed the unused (and non-thread-safe) instance field
 * {@code Date d;} and extracted the duplicated "now as Timestamp" code.
 */
@Service
public class ReceiveOrderServiceImpl implements ReceiveOrderService {

    @Autowired
    HibernateDao<Order> odao;

    /** @return the current moment as a SQL Timestamp, used as the receive time. */
    private static Timestamp currentTimestamp() {
        return new Timestamp(new Date().getTime());
    }

    /**
     * Marks the order identified by (orderId, username) as received now.
     *
     * @return the updated Order, or null when no matching order exists
     */
    @Override
    public Order confirmReceiveOrder(Integer orderId, String username) {
        Order updatedOrder = odao.doQueryUnique(
                "from Order where orderId = ? and username = ? ", orderId, username);
        if (updatedOrder != null) {
            updatedOrder.setPickOrDeliveryTime(currentTimestamp());
            odao.update(updatedOrder);
        }
        return updatedOrder;
    }

    /** @return the given user's orders whose pickOrDeliveryTime is still null */
    @Override
    public List<Order> showNonReceivedOrder(String username) {
        String sql = "from Order where username = ? and pickOrDeliveryTime is null";
        return odao.doQueryList(sql, true, username);
    }

    /** @return every order (any user) whose pickOrDeliveryTime is still null */
    @Override
    public List<Order> showallUnreceivedOrders() {
        return odao.doQueryList("from Order where pickOrDeliveryTime is null", false);
    }

    /**
     * Marks the order identified by orderId (regardless of user) as received now.
     *
     * @return true when a matching order was found and updated, false otherwise
     */
    @Override
    public Boolean confirmReceiveAnOrder(Integer orderId) {
        Order confirmedOrder = odao.doQueryUnique("from Order where orderId = ? ", orderId);
        if (confirmedOrder == null) {
            return false;
        }
        confirmedOrder.setPickOrDeliveryTime(currentTimestamp());
        odao.update(confirmedOrder);
        return true;
    }
}
ZikiFlicky/Rael
src/types/module.h
<gh_stars>1-10
#ifndef RAEL_MODULE_H
#define RAEL_MODULE_H

#include "value.h"
#include "varmap.h"

/* declare module type (the definition lives in the matching .c file) */
extern RaelTypeValue RaelModuleType;

/* A module value: the common RaelValue header plus the module's name. */
typedef struct RaelModuleValue {
    RAEL_VALUE_BASE;
    char *name; /* module name; ownership/lifetime handled by module_new — confirm in module.c */
} RaelModuleValue;

/* create a RaelValue with the type of MethodFunc, binding `func` to
   `method_self` under the given `name` */
RaelValue *method_cfunc_new(RaelValue *method_self, char *name, RaelMethodFunc func);

/* return a new initialized RaelModuleValue with the given name */
RaelValue *module_new(char *name);

/* set a key inside of a module value (i.e. bind `value` to `varname`) */
void module_set_key(RaelModuleValue *self, char *varname, RaelValue *value);

#endif /* RAEL_MODULE_H */
gbdrt/yaps
tests/yaps/regression_matrix.py
from yaps.lib import *
import yaps as yaps

# Reference Stan program that the @yaps.model function below corresponds to.
"""
data {
  int<lower=0> N;
  int<lower=0> K;
  matrix[N, K] x;
  vector[N] y;
}
parameters {
  real alpha;
  vector[K] beta;
  real<lower =0> sigma;
}
model {
  y ~ normal(x * beta + alpha, sigma);
}
"""

# Dependent type variables sizing the matrix/vector types in the model
# signature: N rows (observations) by K columns (predictors), per the Stan
# reference above.
N = yaps.dependent_type_var()
K = yaps.dependent_type_var()


# yaps translates this function's AST into the Stan model above:
# annotated arguments form the data block, bare annotations in the body
# declare parameters, and `is` expresses a sampling statement (~).
@yaps.model
def regression(N: int(lower=0), K: int(lower=0), x: matrix[N, K], y: vector[N]):
    alpha: real
    beta: vector[K]
    sigma: real(lower=0)
    y is normal(x * beta + alpha, sigma)  # y ~ normal(x * beta + alpha, sigma)


print(regression)
Kunstenpunt/havelovewilltravel
hlwtadmin/migrations/0031_auto_20200529_0952.py
<gh_stars>1-10
# Generated by Django 3.0 on 2020-05-29 07:52

from django.db import migrations, models


class Migration(migrations.Migration):
    # Adds a required `sort_name` CharField to `organisation` and to its
    # historical counterpart `historicalorganisation` (presumably the
    # history-tracking shadow model — confirm against the app's models).
    # `default=...` with `preserve_default=False` only backfills existing
    # rows during the migration; the field keeps no default afterwards.

    dependencies = [
        ('hlwtadmin', '0030_auto_20200522_1032'),
    ]

    operations = [
        migrations.AddField(
            model_name='historicalorganisation',
            name='sort_name',
            field=models.CharField(default='No sort name provided', max_length=200),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='organisation',
            name='sort_name',
            field=models.CharField(default='No sort name provided', max_length=200),
            preserve_default=False,
        ),
    ]
JaneMandy/CS
org/apache/xpath/operations/Number.java
<reponame>JaneMandy/CS
package org.apache.xpath.operations;

import javax.xml.transform.TransformerException;
import org.apache.xpath.XPathContext;
import org.apache.xpath.objects.XNumber;
import org.apache.xpath.objects.XObject;

/**
 * XPath unary number() operation: coerces its operand to a number value.
 */
public class Number extends UnaryOperation {

   /**
    * Returns the operand unchanged when it already carries the number type
    * tag (2 — presumably XObject's number class constant; confirm), otherwise
    * wraps its numeric value in a new XNumber.
    */
   public XObject operate(XObject right) throws TransformerException {
      if (right.getType() == 2) {
         return right;
      }

      return new XNumber(right.num());
   }

   /** Numeric value of the right-hand operand evaluated in the given context. */
   public double num(XPathContext xctxt) throws TransformerException {
      return super.m_right.num(xctxt);
   }
}
jjzhang166/balancer
contrib/libs/matrixssl3/core/coreConfig.h
<gh_stars>10-100
/*
 *	coreConfig.h
 *	Release $Name: MATRIXSSL-3-2-1-OPEN $
 *
 *	Configuration settings for PeerSec core module
 */
/*
 *	Copyright (c) PeerSec Networks, 2002-2011. All Rights Reserved.
 *	The latest version of this code is available at http://www.matrixssl.org
 *
 *	This software is open source; you can redistribute it and/or modify
 *	it under the terms of the GNU General Public License as published by
 *	the Free Software Foundation; either version 2 of the License, or
 *	(at your option) any later version.
 *
 *	This General Public License does NOT permit incorporating this software
 *	into proprietary programs.  If you are unable to comply with the GPL, a
 *	commercial license for this software may be purchased from PeerSec Networks
 *	at http://www.peersec.com
 *
 *	This program is distributed in WITHOUT ANY WARRANTY; without even the
 *	implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 *	See the GNU General Public License for more details.
 *
 *	You should have received a copy of the GNU General Public License
 *	along with this program; if not, write to the Free Software
 *	Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *	http://www.gnu.org/copyleft/gpl.html
 */
/******************************************************************************/

#ifndef _h_PS_CORECONFIG
#define _h_PS_CORECONFIG

/******************************************************************************/
/* Configurable features */
/******************************************************************************/
/*
	If enabled, calls to the psError set of APIs will perform a platform
	abort on the executable to aid in debugging.
*/
/* #define HALT_ON_PS_ERROR */ /* NOT RECOMMENDED FOR PRODUCTION BUILDS */

/******************************************************************************/
/*
	Turn on the psTraceCore set of APIs for log trace of the core module
*/
/* #define USE_CORE_TRACE */

/******************************************************************************/
/*
	Include the osdepMutex family of APIs
*/
#define USE_MULTITHREADING

/******************************************************************************/
/*
	Does the platform support a native 64-bit data type (long long)?  Most
	32 bit platforms do support native 64 bit integers, or at least can
	accumulate 32 bit products into a 64 bit register result.
	(Formerly this option was named USE_INT64)
*/
#define HAVE_NATIVE_INT64

#endif /* _h_PS_CORECONFIG */

/******************************************************************************/
uipowerranger/mysqldb
services/OrderService.js
const knex = require("../helpers/db_connect"); const moment = require("moment"); const { v4: uuidv4 } = require("uuid"); const createOrder = async (request) => { const { total_amount, first_name, last_name, email_id, phone_number, alternate_phone = "", mailing_address: { address1, address2 = "", city, state, postcode }, shipping_address: { address1: saddress1, address2: saddress2 = "", city: scity, state: sstate, postcode: spostcode, }, state_details, redeempoints_used = 0, delivery_charges = 0, user, items, order_date, } = request; let order_uuid = uuidv4(); let oid = await knex("orders").insert({ user, order_uuid: order_uuid, order_date, status: 1, total_amount, delivery_charges, first_name, last_name, email_id, phone_number, alternate_phone, mailing_address_address1: address1, mailing_address_address2: address2, mailing_address_city: city, mailing_address_state: state, mailing_address_postcode: postcode, shipping_address_address1: saddress1, shipping_address_address2: saddress2, shipping_address_city: scity, shipping_address_state: sstate, shipping_address_postcode: spostcode, payment: 0, order_completed: 0, state_details, redeempoints_used, }); items.map(async (item) => { await knex("orders_items").insert({ order_id: oid[0], order_uuid: order_uuid, item_id: item.item_id, item_name: item.item_name, item_image: item.item_image, quantity: item.quantity, price: item.price, amount: item.amount, status: 1, }); }); return { order_id: order_uuid, order_date: order_date, }; }; const getOrderById = async (id) => { return knex("orders") .where({ order_uuid: id, }) .select(); }; const updatePayment = async (id, status) => { return knex("orders") .where({ order_uuid: id, }) .update({ payment: status, }); }; module.exports = { createOrder, getOrderById, updatePayment, };
wildfly-boot/wildfly-boot
bootstrap/src/test/java/org/wildfly/swarm/bootstrap/util/WildFlySwarmDependenciesConfTest.java
<reponame>wildfly-boot/wildfly-boot<filename>bootstrap/src/test/java/org/wildfly/swarm/bootstrap/util/WildFlySwarmDependenciesConfTest.java
/**
 * Copyright 2015-2016 Red Hat, Inc, and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wildfly.swarm.bootstrap.util;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.List;
import java.util.stream.Collectors;

import org.junit.Test;

import static org.fest.assertions.Assertions.assertThat;

/**
 * Round-trip test for {@link WildFlySwarmDependenciesConf} serialization.
 *
 * @author <NAME>
 */
public class WildFlySwarmDependenciesConfTest {

    /**
     * Writes a conf with two primary and one extra dependency, checks the
     * serialized text (one "section:gav" line per dependency), then parses
     * that text back and checks both dependency collections survived intact.
     */
    @Test
    public void testRoundTrip() throws Exception {
        WildFlySwarmDependenciesConf conf = new WildFlySwarmDependenciesConf();

        conf.addPrimaryDependency(MavenArtifactDescriptor.fromMscGav("org.jboss.spec.javax.enterprise.concurrent:jboss-concurrency-api_1.0_spec:1.0.0.Final"));
        conf.addPrimaryDependency(MavenArtifactDescriptor.fromMscGav("org.jboss.spec.javax.servlet:jboss-servlet-api_3.1_spec:1.0.0.Final"));
        conf.addExtraDependency(MavenArtifactDescriptor.fromMavenGav("org.jolokia:jolokia-war:war:1.3.2"));

        // serialize to text
        ByteArrayOutputStream out = new ByteArrayOutputStream();

        conf.write(out);

        String written = new String(out.toByteArray());

        // one line per dependency, prefixed by its section name
        String[] lines = written.split("(\\r)?(\\n)");

        assertThat(lines).hasSize(3);

        assertThat(lines).contains("primary:org.jboss.spec.javax.enterprise.concurrent:jboss-concurrency-api_1.0_spec:1.0.0.Final");
        assertThat(lines).contains("primary:org.jboss.spec.javax.servlet:jboss-servlet-api_3.1_spec:1.0.0.Final");
        assertThat(lines).contains("extra:org.jolokia:jolokia-war:war:1.3.2");

        // parse the serialized form back into a fresh conf
        ByteArrayInputStream in = new ByteArrayInputStream(written.getBytes());

        conf = new WildFlySwarmDependenciesConf(in);

        assertThat(conf.getPrimaryDependencies()).hasSize(2);

        List<String> primaryGavs = conf.getPrimaryDependencies().stream().map(e -> e.mscGav()).collect(Collectors.toList());

        assertThat(primaryGavs).contains("org.jboss.spec.javax.enterprise.concurrent:jboss-concurrency-api_1.0_spec:1.0.0.Final");
        assertThat(primaryGavs).contains("org.jboss.spec.javax.servlet:jboss-servlet-api_3.1_spec:1.0.0.Final");

        assertThat(conf.getExtraDependencies()).hasSize(1);

        List<String> extraGavs = conf.getExtraDependencies().stream().map(e -> e.mavenGav()).collect(Collectors.toList());

        assertThat(extraGavs).contains("org.jolokia:jolokia-war:war:1.3.2");
    }
}
Mortimyrrh/Mycelia-Forge
build/tmp/expandedArchives/forge-1.16.5-36.1.0_mapped_official_1.16.5-sources.jar_01fb3b8234f72f7172716347a075bc60/net/minecraft/client/renderer/entity/model/TintedAgeableModel.java
package net.minecraft.client.renderer.entity.model;

import com.mojang.blaze3d.matrix.MatrixStack;
import com.mojang.blaze3d.vertex.IVertexBuilder;
import net.minecraft.entity.Entity;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;

/**
 * An AgeableModel whose rendered colors are multiplied by a configurable
 * RGB tint before being handed to the parent renderer.
 */
@OnlyIn(Dist.CLIENT)
public abstract class TintedAgeableModel<E extends Entity> extends AgeableModel<E> {
   // Tint components; the default of 1.0 leaves the incoming colors unchanged.
   private float red = 1.0F;
   private float green = 1.0F;
   private float blue = 1.0F;

   /** Stores the RGB multiplier applied on subsequent renders. */
   public void setColor(float redIn, float greenIn, float blueIn) {
      this.red = redIn;
      this.green = greenIn;
      this.blue = blueIn;
   }

   /** Delegates to the parent render with each color channel scaled by the stored tint. */
   public void renderToBuffer(MatrixStack matrixStack, IVertexBuilder buffer, int packedLight, int packedOverlay, float r, float g, float b, float alpha) {
      super.renderToBuffer(matrixStack, buffer, packedLight, packedOverlay, this.red * r, this.green * g, this.blue * b, alpha);
   }
}
SongZhao/quickstep-with-BW
types/operations/comparisons/Comparison-inl.hpp
<filename>types/operations/comparisons/Comparison-inl.hpp
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 **/

#ifndef QUICKSTEP_TYPES_OPERATIONS_COMPARISONS_COMPARATORS_INL_HPP_
#define QUICKSTEP_TYPES_OPERATIONS_COMPARISONS_COMPARATORS_INL_HPP_

#include "types/operations/comparisons/Comparison.hpp"

#include <cstddef>

#include "catalog/CatalogTypedefs.hpp"
#include "storage/TupleIdSequence.hpp"

#ifdef QUICKSTEP_ENABLE_VECTOR_COPY_ELISION_SELECTION
#include "storage/ValueAccessor.hpp"
#include "storage/ValueAccessorUtil.hpp"
#endif  // QUICKSTEP_ENABLE_VECTOR_COPY_ELISION_SELECTION

#include "types/TypedValue.hpp"
#include "types/containers/ColumnVector.hpp"
#include "types/containers/ColumnVectorUtil.hpp"

#include "glog/logging.h"

// Default vectorized implementations of UncheckedComparator over column
// vectors, value accessors, and static values. Each function builds a
// TupleIdSequence of positions where the comparison holds. The common
// structure is: if short-circuiting is compiled in and a filter is given,
// evaluate only filtered positions; otherwise evaluate everything and
// (when not short-circuiting) intersect with the filter afterwards. An
// optional existence bitmap maps dense column-vector positions onto
// possibly-sparse tuple ids.
//
// NOTE(review): std::unique_ptr is used below but <memory> is not included
// here — presumably pulled in transitively via one of the headers; confirm.

namespace quickstep {

// Compare two column vectors element-wise.
template <bool left_nullable, bool right_nullable>
TupleIdSequence* UncheckedComparator::compareColumnVectorsDefaultImpl(
    const ColumnVector &left,
    const ColumnVector &right,
    const TupleIdSequence *filter,
    const TupleIdSequence *existence_bitmap) const {
  return InvokeOnColumnVector(
      left,
      [&](const auto &left_column_vector) -> TupleIdSequence* {  // NOLINT(build/c++11)
    return InvokeOnColumnVector(
        right,
        [&](const auto &right_column_vector) -> TupleIdSequence* {  // NOLINT(build/c++11)
#ifdef QUICKSTEP_ENABLE_VECTOR_PREDICATE_SHORT_CIRCUIT
      static constexpr bool short_circuit = true;
#else
      static constexpr bool short_circuit = false;
#endif
      DCHECK_EQ(left_column_vector.size(), right_column_vector.size());
      DCHECK((existence_bitmap == nullptr)
             || (existence_bitmap->numTuples() == left_column_vector.size()));
      DCHECK((filter == nullptr)
             || ((existence_bitmap == nullptr)
                     ? (filter->length() == left_column_vector.size())
                     : (filter->length() == existence_bitmap->length())));
      TupleIdSequence *result = new TupleIdSequence(
          (existence_bitmap == nullptr) ? left_column_vector.size()
                                        : existence_bitmap->length());
      if (short_circuit && (filter != nullptr)) {
        if (existence_bitmap != nullptr) {
          TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
          for (std::size_t cv_pos = 0; cv_pos < left_column_vector.size(); ++cv_pos) {
            result->set(*existence_it,
                        filter->get(*existence_it)
                            && this->compareDataPtrs(
                                left_column_vector.template getUntypedValue<left_nullable>(cv_pos),
                                right_column_vector.template getUntypedValue<right_nullable>(cv_pos)));
            ++existence_it;
          }
        } else {
          for (TupleIdSequence::const_iterator filter_it = filter->begin();
               filter_it != filter->end();
               ++filter_it) {
            result->set(*filter_it,
                        this->compareDataPtrs(
                            left_column_vector.template getUntypedValue<left_nullable>(*filter_it),
                            right_column_vector.template getUntypedValue<right_nullable>(*filter_it)));
          }
        }
      } else {
        if (existence_bitmap != nullptr) {
          TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
          for (std::size_t cv_pos = 0; cv_pos < left_column_vector.size(); ++cv_pos) {
            result->set(*existence_it,
                        this->compareDataPtrs(
                            left_column_vector.template getUntypedValue<left_nullable>(cv_pos),
                            right_column_vector.template getUntypedValue<right_nullable>(cv_pos)));
            ++existence_it;
          }
        } else {
          for (std::size_t pos = 0; pos < left_column_vector.size(); ++pos) {
            result->set(pos,
                        this->compareDataPtrs(
                            left_column_vector.template getUntypedValue<left_nullable>(pos),
                            right_column_vector.template getUntypedValue<right_nullable>(pos)));
          }
        }
        if (!short_circuit && (filter != nullptr)) {
          result->intersectWith(*filter);
        }
      }
      return result;
    });
  });
}

// Compare each element of a column vector against one static value.
// A null static value yields an all-false result immediately.
template <bool left_nullable, bool right_nullable>
TupleIdSequence* UncheckedComparator::compareColumnVectorAndStaticValueDefaultImpl(
    const ColumnVector &left,
    const TypedValue &right,
    const TupleIdSequence *filter,
    const TupleIdSequence *existence_bitmap) const {
  return InvokeOnColumnVector(
      left,
      [&](const auto &column_vector) -> TupleIdSequence* {  // NOLINT(build/c++11)
#ifdef QUICKSTEP_ENABLE_VECTOR_PREDICATE_SHORT_CIRCUIT
    static constexpr bool short_circuit = true;
#else
    static constexpr bool short_circuit = false;
#endif
    DCHECK((existence_bitmap == nullptr)
           || (existence_bitmap->numTuples() == column_vector.size()));
    DCHECK((filter == nullptr)
           || ((existence_bitmap == nullptr)
                   ? (filter->length() == column_vector.size())
                   : (filter->length() == existence_bitmap->length())));
    TupleIdSequence *result = new TupleIdSequence(
        (existence_bitmap == nullptr) ? column_vector.size()
                                      : existence_bitmap->length());
    if (right_nullable && right.isNull()) {
      return result;
    }
    if (short_circuit && (filter != nullptr)) {
      if (existence_bitmap != nullptr) {
        TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
        for (std::size_t cv_pos = 0; cv_pos < column_vector.size(); ++cv_pos) {
          result->set(*existence_it,
                      filter->get(*existence_it)
                          && this->compareDataPtrWithTypedValue(
                              column_vector.template getUntypedValue<left_nullable>(cv_pos),
                              right));
          ++existence_it;
        }
      } else {
        for (TupleIdSequence::const_iterator filter_it = filter->begin();
             filter_it != filter->end();
             ++filter_it) {
          result->set(*filter_it,
                      this->compareDataPtrWithTypedValue(
                          column_vector.template getUntypedValue<left_nullable>(*filter_it),
                          right));
        }
      }
    } else {
      if (existence_bitmap != nullptr) {
        TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
        for (std::size_t cv_pos = 0; cv_pos < column_vector.size(); ++cv_pos) {
          result->set(*existence_it,
                      this->compareDataPtrWithTypedValue(
                          column_vector.template getUntypedValue<left_nullable>(cv_pos),
                          right));
          ++existence_it;
        }
      } else {
        for (std::size_t pos = 0; pos < column_vector.size(); ++pos) {
          result->set(pos,
                      this->compareDataPtrWithTypedValue(
                          column_vector.template getUntypedValue<left_nullable>(pos),
                          right));
        }
      }
      if (!short_circuit && (filter != nullptr)) {
        result->intersectWith(*filter);
      }
    }
    return result;
  });
}

// Mirror of the above with the static value on the left-hand side.
template <bool left_nullable, bool right_nullable>
TupleIdSequence* UncheckedComparator::compareStaticValueAndColumnVectorDefaultImpl(
    const TypedValue &left,
    const ColumnVector &right,
    const TupleIdSequence *filter,
    const TupleIdSequence *existence_bitmap) const {
  return InvokeOnColumnVector(
      right,
      [&](const auto &column_vector) -> TupleIdSequence* {  // NOLINT(build/c++11)
#ifdef QUICKSTEP_ENABLE_VECTOR_PREDICATE_SHORT_CIRCUIT
    static constexpr bool short_circuit = true;
#else
    static constexpr bool short_circuit = false;
#endif
    DCHECK((existence_bitmap == nullptr)
           || (existence_bitmap->numTuples() == column_vector.size()));
    DCHECK((filter == nullptr)
           || ((existence_bitmap == nullptr)
                   ? (filter->length() == column_vector.size())
                   : (filter->length() == existence_bitmap->length())));
    TupleIdSequence *result = new TupleIdSequence(
        (existence_bitmap == nullptr) ? column_vector.size()
                                      : existence_bitmap->length());
    if (left_nullable && left.isNull()) {
      return result;
    }
    if (short_circuit && (filter != nullptr)) {
      if (existence_bitmap != nullptr) {
        TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
        for (std::size_t cv_pos = 0; cv_pos < column_vector.size(); ++cv_pos) {
          result->set(*existence_it,
                      filter->get(*existence_it)
                          && this->compareTypedValueWithDataPtr(
                              left,
                              column_vector.template getUntypedValue<right_nullable>(cv_pos)));
          ++existence_it;
        }
      } else {
        for (TupleIdSequence::const_iterator filter_it = filter->begin();
             filter_it != filter->end();
             ++filter_it) {
          result->set(*filter_it,
                      this->compareTypedValueWithDataPtr(
                          left,
                          column_vector.template getUntypedValue<right_nullable>(*filter_it)));
        }
      }
    } else {
      if (existence_bitmap != nullptr) {
        TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
        for (std::size_t cv_pos = 0; cv_pos < column_vector.size(); ++cv_pos) {
          result->set(*existence_it,
                      this->compareTypedValueWithDataPtr(
                          left,
                          column_vector.template getUntypedValue<right_nullable>(cv_pos)));
          ++existence_it;
        }
      } else {
        for (std::size_t pos = 0; pos < column_vector.size(); ++pos) {
          result->set(pos,
                      this->compareTypedValueWithDataPtr(
                          left,
                          column_vector.template getUntypedValue<right_nullable>(pos)));
        }
      }
      if (!short_circuit && (filter != nullptr)) {
        result->intersectWith(*filter);
      }
    }
    return result;
  });
}

#ifdef QUICKSTEP_ENABLE_VECTOR_COPY_ELISION_SELECTION
// Compare two attributes of the same accessor, tuple by tuple. Uses the
// accessor's fast strided ColumnAccessor path when available.
template <bool left_nullable, bool right_nullable>
TupleIdSequence* UncheckedComparator::compareSingleValueAccessorDefaultImpl(
    ValueAccessor *accessor,
    const attribute_id left_id,
    const attribute_id right_id,
    const TupleIdSequence *filter) const {
  return InvokeOnValueAccessorMaybeTupleIdSequenceAdapter(
      accessor,
      [&](auto *accessor) -> TupleIdSequence* {  // NOLINT(build/c++11)
#ifdef QUICKSTEP_ENABLE_VECTOR_PREDICATE_SHORT_CIRCUIT
    static constexpr bool short_circuit = true;
#else
    static constexpr bool short_circuit = false;
#endif
    TupleIdSequence *result = new TupleIdSequence(accessor->getEndPosition());
    if (short_circuit && (filter != nullptr)) {
      DCHECK_EQ(filter->length(), result->length());
      for (TupleIdSequence::const_iterator filter_it = filter->begin();
           filter_it != filter->end();
           ++filter_it) {
        result->set(*filter_it,
                    this->compareDataPtrs(
                        accessor->template getUntypedValueAtAbsolutePosition<left_nullable>(left_id, *filter_it),
                        accessor->template getUntypedValueAtAbsolutePosition<right_nullable>(right_id, *filter_it)));
      }
    } else {
      accessor->beginIteration();
      if (accessor->isColumnAccessorSupported()) {
        // If ColumnAccessor is supported on the underlying accessor, we have a fast strided
        // column accessor available for the iteration on the underlying block.
        std::unique_ptr<const ColumnAccessor<left_nullable>>
            left_column_accessor(accessor->template getColumnAccessor<left_nullable>(left_id));
        std::unique_ptr<const ColumnAccessor<right_nullable>>
            right_column_accessor(accessor->template getColumnAccessor<right_nullable>(right_id));
        DCHECK(left_column_accessor != nullptr);
        DCHECK(right_column_accessor != nullptr);
        while (accessor->next()) {
          result->set(accessor->getCurrentPosition(),
                      this->compareDataPtrs(
                          left_column_accessor->getUntypedValue(),
                          right_column_accessor->getUntypedValue()));
        }
      } else {
        while (accessor->next()) {
          result->set(accessor->getCurrentPosition(),
                      this->compareDataPtrs(
                          accessor->template getUntypedValue<left_nullable>(left_id),
                          accessor->template getUntypedValue<right_nullable>(right_id)));
        }
      }
      if (!short_circuit && (filter != nullptr)) {
        result->intersectWith(*filter);
      }
    }
    return result;
  });
}

// Compare one accessor attribute against a static value.
template <bool left_nullable, bool right_nullable>
TupleIdSequence* UncheckedComparator::compareValueAccessorAndStaticValueDefaultImpl(
    ValueAccessor *left_accessor,
    const attribute_id left_id,
    const TypedValue &right,
    const TupleIdSequence *filter) const {
  return InvokeOnValueAccessorMaybeTupleIdSequenceAdapter(
      left_accessor,
      [&](auto *left_accessor) -> TupleIdSequence* {  // NOLINT(build/c++11)
#ifdef QUICKSTEP_ENABLE_VECTOR_PREDICATE_SHORT_CIRCUIT
    static constexpr bool short_circuit = true;
#else
    static constexpr bool short_circuit = false;
#endif
    TupleIdSequence *result = new TupleIdSequence(left_accessor->getEndPosition());
    if (right_nullable && right.isNull()) {
      return result;
    }
    if (short_circuit && (filter != nullptr)) {
      DCHECK_EQ(filter->length(), result->length());
      for (TupleIdSequence::const_iterator filter_it = filter->begin();
           filter_it != filter->end();
           ++filter_it) {
        result->set(*filter_it,
                    this->compareDataPtrWithTypedValue(
                        left_accessor->template getUntypedValueAtAbsolutePosition<left_nullable>(
                            left_id, *filter_it),
                        right));
      }
    } else {
      left_accessor->beginIteration();
      while (left_accessor->next()) {
        result->set(left_accessor->getCurrentPosition(),
                    this->compareDataPtrWithTypedValue(
                        left_accessor->template getUntypedValue<left_nullable>(left_id),
                        right));
      }
      if (!short_circuit && (filter != nullptr)) {
        result->intersectWith(*filter);
      }
    }
    return result;
  });
}

// Mirror of the above with the static value on the left-hand side.
template <bool left_nullable, bool right_nullable>
TupleIdSequence* UncheckedComparator::compareStaticValueAndValueAccessorDefaultImpl(
    const TypedValue &left,
    ValueAccessor *right_accessor,
    const attribute_id right_id,
    const TupleIdSequence *filter) const {
  return InvokeOnValueAccessorMaybeTupleIdSequenceAdapter(
      right_accessor,
      [&](auto *right_accessor) -> TupleIdSequence* {  // NOLINT(build/c++11)
#ifdef QUICKSTEP_ENABLE_VECTOR_PREDICATE_SHORT_CIRCUIT
    static constexpr bool short_circuit = true;
#else
    static constexpr bool short_circuit = false;
#endif
    TupleIdSequence *result = new TupleIdSequence(right_accessor->getEndPosition());
    if (left_nullable && left.isNull()) {
      return result;
    }
    if (short_circuit && (filter != nullptr)) {
      DCHECK_EQ(filter->length(), result->length());
      for (TupleIdSequence::const_iterator filter_it = filter->begin();
           filter_it != filter->end();
           ++filter_it) {
        result->set(*filter_it,
                    this->compareTypedValueWithDataPtr(
                        left,
                        right_accessor->template getUntypedValueAtAbsolutePosition<right_nullable>(
                            right_id, *filter_it)));
      }
    } else {
      right_accessor->beginIteration();
      while (right_accessor->next()) {
        result->set(right_accessor->getCurrentPosition(),
                    this->compareTypedValueWithDataPtr(
                        left,
                        right_accessor->template getUntypedValue<right_nullable>(right_id)));
      }
      if (!short_circuit && (filter != nullptr)) {
        result->intersectWith(*filter);
      }
    }
    return result;
  });
}

// Compare a column vector (left) against an accessor attribute (right),
// position by position. The two inputs must be the same length.
template <bool left_nullable, bool right_nullable>
TupleIdSequence* UncheckedComparator::compareColumnVectorAndValueAccessorDefaultImpl(
    const ColumnVector &left,
    ValueAccessor *right_accessor,
    const attribute_id right_id,
    const TupleIdSequence *filter,
    const TupleIdSequence *existence_bitmap) const {
  return InvokeOnColumnVector(
      left,
      [&](const auto &column_vector) -> TupleIdSequence* {  // NOLINT(build/c++11)
    return InvokeOnValueAccessorMaybeTupleIdSequenceAdapter(
        right_accessor,
        [&](auto *right_accessor) -> TupleIdSequence* {  // NOLINT(build/c++11)
#ifdef QUICKSTEP_ENABLE_VECTOR_PREDICATE_SHORT_CIRCUIT
      static constexpr bool short_circuit = true;
#else
      static constexpr bool short_circuit = false;
#endif
      DCHECK_EQ(column_vector.size(),
                static_cast<std::size_t>(right_accessor->getNumTuples()));
      DCHECK((existence_bitmap == nullptr)
             || (existence_bitmap->numTuples() == column_vector.size()));
      DCHECK((filter == nullptr)
             || ((existence_bitmap == nullptr)
                     ? (filter->length() == column_vector.size())
                     : (filter->length() == existence_bitmap->length())));
      TupleIdSequence *result = new TupleIdSequence(
          (existence_bitmap == nullptr) ? column_vector.size()
                                        : existence_bitmap->length());
      if (short_circuit && (filter != nullptr)) {
        if (existence_bitmap != nullptr) {
          TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
          for (std::size_t cv_pos = 0; cv_pos < column_vector.size(); ++cv_pos) {
            result->set(*existence_it,
                        filter->get(*existence_it)
                            && this->compareDataPtrs(
                                column_vector.template getUntypedValue<left_nullable>(cv_pos),
                                right_accessor->template getUntypedValueAtAbsolutePosition<right_nullable>(
                                    right_id, *existence_it)));
            ++existence_it;
          }
        } else {
          for (TupleIdSequence::const_iterator filter_it = filter->begin();
               filter_it != filter->end();
               ++filter_it) {
            result->set(*filter_it,
                        this->compareDataPtrs(
                            column_vector.template getUntypedValue<left_nullable>(*filter_it),
                            right_accessor->template getUntypedValueAtAbsolutePosition<right_nullable>(
                                right_id, *filter_it)));
          }
        }
      } else {
        if (existence_bitmap != nullptr) {
          TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
          for (std::size_t cv_pos = 0; cv_pos < column_vector.size(); ++cv_pos) {
            result->set(*existence_it,
                        this->compareDataPtrs(
                            column_vector.template getUntypedValue<left_nullable>(cv_pos),
                            right_accessor->template getUntypedValueAtAbsolutePosition<right_nullable>(
                                right_id, *existence_it)));
            ++existence_it;
          }
        } else {
          right_accessor->beginIteration();
          std::size_t cv_pos = 0;
          while (right_accessor->next()) {
            result->set(right_accessor->getCurrentPosition(),
                        this->compareDataPtrs(
                            column_vector.template getUntypedValue<left_nullable>(cv_pos),
                            right_accessor->template getUntypedValue<right_nullable>(right_id)));
            ++cv_pos;
          }
        }
        if (!short_circuit && (filter != nullptr)) {
          result->intersectWith(*filter);
        }
      }
      return result;
    });
  });
}

// Mirror of the above with the accessor on the left and the column vector
// on the right.
template <bool left_nullable, bool right_nullable>
TupleIdSequence* UncheckedComparator::compareValueAccessorAndColumnVectorDefaultImpl(
    ValueAccessor *left_accessor,
    const attribute_id left_id,
    const ColumnVector &right,
    const TupleIdSequence *filter,
    const TupleIdSequence *existence_bitmap) const {
  return InvokeOnValueAccessorMaybeTupleIdSequenceAdapter(
      left_accessor,
      [&](auto *left_accessor) -> TupleIdSequence* {  // NOLINT(build/c++11)
    return InvokeOnColumnVector(
        right,
        [&](const auto &column_vector) -> TupleIdSequence* {  // NOLINT(build/c++11)
#ifdef QUICKSTEP_ENABLE_VECTOR_PREDICATE_SHORT_CIRCUIT
      static constexpr bool short_circuit = true;
#else
      static constexpr bool short_circuit = false;
#endif
      DCHECK_EQ(column_vector.size(),
                static_cast<std::size_t>(left_accessor->getNumTuples()));
      DCHECK((existence_bitmap == nullptr)
             || (existence_bitmap->numTuples() == column_vector.size()));
      DCHECK((filter == nullptr)
             || ((existence_bitmap == nullptr)
                     ? (filter->length() == column_vector.size())
                     : (filter->length() == existence_bitmap->length())));
      TupleIdSequence *result = new TupleIdSequence(
          (existence_bitmap == nullptr) ? column_vector.size()
                                        : existence_bitmap->length());
      if (short_circuit && (filter != nullptr)) {
        if (existence_bitmap != nullptr) {
          TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
          for (std::size_t cv_pos = 0; cv_pos < column_vector.size(); ++cv_pos) {
            result->set(*existence_it,
                        filter->get(*existence_it)
                            && this->compareDataPtrs(
                                left_accessor->template getUntypedValueAtAbsolutePosition<left_nullable>(
                                    left_id, *existence_it),
                                column_vector.template getUntypedValue<right_nullable>(cv_pos)));
            ++existence_it;
          }
        } else {
          for (TupleIdSequence::const_iterator filter_it = filter->begin();
               filter_it != filter->end();
               ++filter_it) {
            result->set(*filter_it,
                        this->compareDataPtrs(
                            left_accessor->template getUntypedValueAtAbsolutePosition<left_nullable>(
                                left_id, *filter_it),
                            column_vector.template getUntypedValue<right_nullable>(*filter_it)));
          }
        }
      } else {
        if (existence_bitmap != nullptr) {
          TupleIdSequence::const_iterator existence_it = existence_bitmap->begin();
          for (std::size_t cv_pos = 0; cv_pos < column_vector.size(); ++cv_pos) {
            result->set(*existence_it,
                        this->compareDataPtrs(
                            left_accessor->template getUntypedValueAtAbsolutePosition<left_nullable>(
                                left_id, *existence_it),
                            column_vector.template getUntypedValue<right_nullable>(cv_pos)));
            ++existence_it;
          }
        } else {
          left_accessor->beginIteration();
          std::size_t cv_pos = 0;
          while (left_accessor->next()) {
            result->set(left_accessor->getCurrentPosition(),
                        this->compareDataPtrs(
                            left_accessor->template getUntypedValue<left_nullable>(left_id),
                            column_vector.template getUntypedValue<right_nullable>(cv_pos)));
            ++cv_pos;
          }
        }
        if (!short_circuit && (filter != nullptr)) {
          result->intersectWith(*filter);
        }
      }
      return result;
    });
  });
}

// Fold an accessor attribute into an accumulated TypedValue, starting from
// `current`. (Definition continues beyond this chunk.)
template <bool value_nullable>
TypedValue UncheckedComparator::accumulateValueAccessorDefaultImpl(
    const TypedValue &current,
    ValueAccessor *accessor,
    const attribute_id value_accessor_id) const {
  TypedValue accumulated = current;
  InvokeOnValueAccessorMaybeTupleIdSequenceAdapter(
accessor, [&](auto *accessor) -> void { // NOLINT(build/c++11) accessor->beginIteration(); while (accessor->next()) { TypedValue value = accessor->getTypedValue(value_accessor_id); if (value_nullable && value.isNull()) { continue; } if (accumulated.isNull() || this->compareTypedValues(value, accumulated)) { accumulated = value; } } }); return accumulated; } #endif // QUICKSTEP_ENABLE_VECTOR_COPY_ELISION_SELECTION template <bool value_nullable> TypedValue UncheckedComparator::accumulateColumnVectorDefaultImpl( const TypedValue &current, const ColumnVector &column_vector) const { TypedValue accumulated = current; InvokeOnColumnVector( column_vector, [&](const auto &column_vector) -> void { // NOLINT(build/c++11) for (std::size_t pos = 0; pos < column_vector.size(); ++pos) { TypedValue value = column_vector.getTypedValue(pos); if (value_nullable && value.isNull()) { continue; } if (accumulated.isNull() || this->compareTypedValues(value, accumulated)) { accumulated = value; } } }); return accumulated; } } // namespace quickstep #endif // QUICKSTEP_TYPES_OPERATIONS_COMPARISONS_COMPARATORS_INL_HPP_
quidphp/front
js/include/uri.js
<reponame>quidphp/front
/*
 * This file is part of the QuidPHP package <https://quidphp.com>
 * Author: <NAME> <<EMAIL>>
 * License: https://github.com/quidphp/front/blob/master/LICENSE
 */

// uri
// script with a set of helper functions related to uri management
// NOTE: relies on the Quid helper objects (Str, Arr, Obj, Pojo, Request, Validate)
// being loaded beforehand; they are not imported here.
const Uri = Quid.Uri = {

    // isInternal
    // returns true if the uri and the comparison share the same scheme and host
    // (when compare is not a string, the current request is used as the comparison)
    isInternal: function(uri,compare) {
        let r = false;

        if(Str.is(uri))
        {
            compare = (Str.is(compare))? this.parse(compare):Request.parse();
            const parse = this.parse(uri);

            if(parse.protocol === compare.protocol && parse.host === compare.host)
            r = true;
        }

        return r;
    },

    // isExternal
    // returns true if the uri and the comparison do NOT share the same scheme and host
    isExternal: function(uri,compare) {
        return (this.isInternal(uri,compare))? false:true;
    },

    // hasExtension
    // returns true if the uri path has a file extension
    hasExtension: function(uri) {
        return this.extension(uri) != null;
    },

    // hasFragment
    // returns true if the uri has a non-empty hash fragment
    hasFragment: function(uri) {
        let r = false;

        if(Str.is(uri))
        {
            const parse = this.parse(uri);

            if(Str.isNotEmpty(parse.hash))
            r = true;
        }

        return r;
    },

    // isOnlyHash
    // returns true if the uri is only a hash fragment (starts with # and has content after it)
    isOnlyHash: function(value) {
        return (Str.length(value) > 1 && Str.isStart('#',value));
    },

    // isSamePathQuery
    // returns true if the uri matches the comparison
    // compares path and query only (scheme, host and hash are ignored)
    isSamePathQuery: function(uri,compare) {
        let r = false;

        if(Str.is(uri))
        {
            compare = (Str.is(compare))? this.parse(compare):Request.parse();
            const parse = this.parse(uri);

            if(parse.pathname === compare.pathname && parse.search === compare.search)
            r = true;
        }

        return r;
    },

    // isSamePathQueryHash
    // returns true if the uri matches the comparison
    // compares path, query and hash (scheme and host are ignored)
    isSamePathQueryHash: function(uri,compare) {
        let r = false;

        if(Str.is(uri))
        {
            compare = (Str.is(compare))? this.parse(compare):Request.parse();
            const parse = this.parse(uri);

            if(parse.pathname === compare.pathname && parse.search === compare.search && parse.hash === compare.hash)
            r = true;
        }

        return r;
    },

    // isHashChange
    // returns true if the uri equals the comparison (scheme, host, path and query)
    // but the hash fragment differs
    isHashChange: function(uri,compare) {
        let r = false;

        if(Str.is(uri))
        {
            compare = (Str.is(compare))? this.parse(compare):Request.parse();
            const parse = this.parse(uri);

            if(parse.protocol === compare.protocol && parse.host === compare.host && parse.pathname === compare.pathname && parse.search === compare.search)
            {
                if((Str.isNotEmpty(parse.hash) || Str.isNotEmpty(compare.hash)) && parse.hash !== compare.hash)
                r = true;
            }
        }

        return r;
    },

    // isSameWithHash
    // returns true if the uri is strictly equal to the comparison AND the uri carries a hash
    isSameWithHash: function(uri,compare) {
        return this.hasFragment(uri) && uri === compare;
    },

    // relative
    // returns a relative form of the uri (path + query, optionally the hash)
    relative: function(uri,hash) {
        return this.build(this.parse(uri),false,hash);
    },

    // absolute
    // returns an absolute form of the uri (scheme + host + path + query, optionally the hash)
    absolute: function(uri,hash) {
        return this.build(this.parse(uri),true,hash);
    },

    // path
    // returns the pathname component of the uri
    path: function(uri) {
        return this.parse(uri).pathname;
    },

    // query
    // returns the query string of the uri without the leading ?
    query: function(uri) {
        return this.makeQuery(this.parse(uri).search).toString();
    },

    // fragment
    // returns the hash of the uri without the # symbol
    fragment: function(uri) {
        return this.makeHash(this.parse(uri).hash,false);
    },

    // basename
    // returns the basename (last path segment) of the uri path, or null
    basename: function(uri) {
        let r = null;
        const path = this.path(uri);

        if(Str.is(path))
        r = path.substr(path.lastIndexOf("/") + 1);

        return r;
    },

    // filename
    // returns the filename of the uri path (basename without its extension), or null
    filename: function(uri) {
        let r = null;
        const basename = this.basename(uri);

        if(Str.is(basename))
        // NOTE(review): the character class excludes both '.' and '$' — '$' is a
        // literal inside [], so names containing '$' in the extension keep it; confirm intent
        r = basename.replace(/\.[^.$]+$/, '');

        return r;
    },

    // extension
    // returns the extension of the uri path (text after the last dot), or null
    extension: function(uri) {
        let r = null;
        const parse = this.parse(uri);
        const regex = /(?:\.([^.]+))?$/;
        const result = regex.exec(parse.pathname);

        if(Arr.is(result) && result.length === 2)
        r = result[1];

        return r;
    },

    // parse
    // returns a URL object holding the different parts of the uri
    // a bare #hash is resolved against the current request location
    // NOTE(review): does not work well on IE11 (URL constructor support)
    parse: function(uri) {
        Str.typecheck(uri);
        const schemeHost = Request.schemeHost();

        if(Str.isStart("#",uri))
        uri = Request.relative()+uri;

        return new URL(uri,schemeHost);
    },

    // build
    // takes a parse-like object and returns a uri string
    // absolute === true prefixes scheme and host (falling back to the current request)
    // hash === true appends the hash fragment
    build: function(parse,absolute,hash) {
        let r = '';
        Obj.typecheck(parse);

        if(absolute === true)
        {
            r += (Str.is(parse.protocol))? parse.protocol:Request.scheme(true);
            r += "//";
            r += (Str.is(parse.host))? parse.host:Request.host();
        }

        r += parse.pathname;

        if(parse.search)
        {
            const searchParams = (parse.search instanceof URLSearchParams)? parse.search:this.makeQuery(parse.search);
            const query = searchParams.toString();

            if(Str.isNotEmpty(query))
            r += "?"+query;
        }

        if(parse.hash && hash === true)
        r += this.makeHash(parse.hash,true);

        return r;
    },

    // makeQuery
    // builds a URLSearchParams object from a query string or a plain object
    // null/undefined values in a plain object become empty strings
    makeQuery: function(value) {
        const r = (Str.is(value))? new URLSearchParams(value):new URLSearchParams();

        if(Pojo.is(value))
        {
            Pojo.each(value,function(value,key) {
                value = (value == null)? '':value;
                r.append(key,value);
            });
        }

        return r;
    },

    // makeHash
    // normalizes a hash fragment, with (symbol === true) or without the # symbol
    makeHash: function(value,symbol) {
        let r = '';

        if(Str.isNotEmpty(value))
        {
            r = value;
            const hasHash = Str.isStart('#',r);

            if(symbol === true)
            r = (!hasHash)? "#"+r:r;

            else if(hasHash)
            r = Str.sub(1,true,r);
        }

        else if(symbol === true)
        r = '#';

        return r;
    },

    // getMailto
    // extracts a valid email address from a mailto: value (as found in an href), or null
    getMailto: function(value) {
        let r = null;
        Str.typecheck(value);

        if(Str.isNotEmpty(value))
        {
            const email = value.replace(/mailto:/,'');

            if(Validate.isEmail(email))
            r = email;
        }

        return r;
    }
}
skoppula/ternarynet
tensorpack/tfutils/symbolic_functions.py
# -*- coding: UTF-8 -*-
# File: symbolic_functions.py
# Author: <NAME> <<EMAIL>>

# NOTE: this module targets a pre-1.0 TensorFlow API
# (tf.pack / tf.sub / tf.select are the old names of stack / subtract / where).

import tensorflow as tf
import numpy as np
from ..utils import logger


def prediction_incorrect(logits, label, topk=1):
    """Per-example top-k error indicator.

    :param logits: NxC tensor of class scores.
    :param label: N tensor of integer ground-truth labels.
    :param topk: a prediction counts as correct if the label is in the top-k scores.
    :returns: a float32 vector of length N with 0/1 values, 1 meaning incorrect prediction
    """
    return tf.cast(tf.logical_not(tf.nn.in_top_k(logits, label, topk)), tf.float32)


def flatten(x):
    """Flatten the tensor to rank 1."""
    return tf.reshape(x, [-1])


def batch_flatten(x):
    """Flatten the tensor except the first (batch) dimension.

    Uses the static shape when it is fully known; otherwise falls back to a
    dynamic reshape built from the runtime batch size.
    """
    shape = x.get_shape().as_list()[1:]
    if None not in shape:
        return tf.reshape(x, [-1, np.prod(shape)])
    return tf.reshape(x, tf.pack([tf.shape(x)[0], -1]))


def logSoftmax(x):
    """Batch log softmax.

    Deprecated: delegates to ``tf.nn.log_softmax`` and emits a warning.

    :param x: NxC tensor.
    :returns: NxC tensor.
    """
    logger.warn("symbf.logSoftmax is deprecated in favor of tf.nn.log_softmax")
    return tf.nn.log_softmax(x)


def class_balanced_binary_class_cross_entropy(pred, label, name='cross_entropy_loss'):
    """
    The class-balanced cross entropy loss for binary classification,
    as in `Holistically-Nested Edge Detection
    <http://arxiv.org/abs/1504.06375>`_.

    :param pred: size: b x ANYTHING. the predictions in [0,1].
    :param label: size: b x ANYTHING. the ground truth in {0,1}.
    :returns: class-balanced binary classification cross entropy loss
    """
    z = batch_flatten(pred)
    y = tf.cast(batch_flatten(label), tf.float32)

    # beta weights each class by the prevalence of the opposite class
    count_neg = tf.reduce_sum(1. - y)
    count_pos = tf.reduce_sum(y)
    beta = count_neg / (count_neg + count_pos)

    # eps guards the log against 0; tf.abs keeps the argument non-negative
    eps = 1e-8
    loss_pos = -beta * tf.reduce_mean(y * tf.log(tf.abs(z) + eps), 1)
    loss_neg = (1. - beta) * tf.reduce_mean((1. - y) * tf.log(tf.abs(1. - z) + eps), 1)

    # loss_neg carries no leading minus sign, so subtracting it here yields the
    # usual sum of the two negative log-likelihood terms
    cost = tf.sub(loss_pos, loss_neg)
    cost = tf.reduce_mean(cost, name=name)
    return cost


def print_stat(x, message=None):
    """ a simple print op that logs the mean and (truncated) contents of a tensor.
        Use it like: x = print_stat(x)
    """
    if message is None:
        message = x.op.name
    return tf.Print(x, [tf.reduce_mean(x), x], summarize=20,
                    message=message, name='print_' + x.op.name)


def rms(x, name=None):
    """Root-mean-square of a tensor.

    When no name is given, the op is named ``<x.op.name>/rms``; it is created
    under the empty name scope because that derived name already carries the
    full scope. When a name is given, the op is created in the current scope.
    """
    if name is None:
        name = x.op.name + '/rms'
        with tf.name_scope(None):   # name already contains the scope
            return tf.sqrt(tf.reduce_mean(tf.square(x)), name=name)
    return tf.sqrt(tf.reduce_mean(tf.square(x)), name=name)


def huber_loss(x, delta=1, name=None):
    """Huber loss: quadratic for |x| < delta, linear beyond, summed over x."""
    if name is None:
        name = 'huber_loss'
    sqrcost = tf.square(x)
    abscost = tf.abs(x)
    return tf.reduce_sum(
        tf.select(abscost < delta,
                  sqrcost * 0.5,
                  abscost * delta - 0.5 * delta ** 2),
        name=name)


def get_scalar_var(name, init_value):
    """Create a non-trainable scalar variable with the given constant initial value."""
    return tf.get_variable(name, shape=[],
                           initializer=tf.constant_initializer(init_value),
                           trainable=False)
Musician101/MCDND
mcdnd-sponge/src/main/java/io/musician101/mcdnd/sponge/data/type/skill/SkillType.java
package io.musician101.mcdnd.sponge.data.type.skill;

import io.musician101.mcdnd.sponge.clazz.ProficiencyBonus;
import io.musician101.mcdnd.sponge.data.manipulator.mutable.AbilityScoreData;
import io.musician101.mcdnd.sponge.data.type.AbilityScoreType;
import io.musician101.mcdnd.sponge.data.type.MCDNDCatalogType;
import java.util.Optional;
import org.spongepowered.api.util.annotation.CatalogedBy;

/**
 * Catalog type describing a skill together with the ability score it is based on.
 */
@CatalogedBy(SkillTypes.class)
public class SkillType extends MCDNDCatalogType {

    /** The ability score this skill derives its modifier from. */
    private final AbilityScoreType type;

    public SkillType(String name, AbilityScoreType type) {
        super(name);
        this.type = type;
    }

    /**
     * Computes the skill modifier for the given score data.
     *
     * @param score        the holder's ability scores
     * @param bonus        the proficiency bonus to apply
     * @param skillModType how the modifier is combined with the bonus
     * @return the calculated modifier, or empty when the backing ability score is absent
     */
    public Optional<Integer> getMod(AbilityScoreData score, ProficiencyBonus bonus, SkillModType skillModType) {
        // Optional#map returns empty when the score is absent, matching an
        // explicit isPresent() check followed by Optional.of(...).
        return score.get(type).map(raw -> skillModType.calculateMod(type.getMod(raw), bonus));
    }

    /** @return the ability score type backing this skill */
    public AbilityScoreType getScoreType() {
        return type;
    }
}
JeromeGobeil/SENG401Chess
src/main/java/com/leokom/games/commons/package-info.java
<gh_stars>0
/**
 * Notions that are common for different games are defined in this package.
 * <p>
 * These abstractions are an evolution from chess-dependent to game-universal
 * notions: anything shared by several games belongs here rather than in a
 * game-specific package.
 */
package com.leokom.games.commons;
ideacrew/event_source
lib/event_source/protocols/http/faraday_connection_proxy.rb
# frozen_string_literal: true

module EventSource
  module Protocols
    module Http
      # Build an HTTP Connection definition using Faraday client.
      # Wraps a {::Faraday::Connection} (the +subject+) configured from an
      # AsyncAPI server definition, and manages the channel proxies created
      # against that connection.
      class FaradayConnectionProxy
        # @attr_reader [String] connection_uri String used to connect with HTTP server
        # @attr_reader [String] connection_params Settings used for configuring {::Faraday::Connection}
        # @attr_reader [Faraday::Connection] subject Server Connection instance
        attr_reader :connection_uri, :connection_params, :subject

        # AsyncAPI HTTP Bindings Protocol version supported by this client
        ProtocolVersion = '0.1.0'
        # Faraday gem version used by this client
        ClientVersion = Faraday::VERSION

        # Default value for {::Faraday::Connection} Adapter.
        # Override this value using the options argument in the constructor
        AdapterDefaults = { typhoeus: nil }.freeze

        # Default values for {::Faraday::Connection} HTTP parameters.
        # Override these values using the options argument in the constructor
        HttpDefaults = { http: { headers: {}, params: {} } }.freeze

        # Default values for {::Faraday::Connection} Request Middleware. These are an
        # ordered stack of request-related processing components (setting headers,
        # encoding parameters). Order: highest to lowest importance.
        #
        # Override default values using the options argument in the constructor
        RequestMiddlewareParamsDefault = {
          retry: {
            order: 10,
            options: {
              max: 5,
              interval: 0.05,
              interval_randomness: 0.5,
              backoff_factor: 2
            }
          }
        }.freeze

        # Default values for {::Faraday::Connection} Response Middleware. These are an
        # ordered stack of response-related processing components. Order: lowest to
        # highest importance.
        #
        # Override default values using the options argument in the constructor
        ResponseMiddlewareParamsDefault = { }.freeze

        # Response middleware used when the server's default content type is JSON:
        # parse response bodies as JSON.
        JsonResponseMiddlewareParamsDefault = { json: { order: 10, options: {} } }.freeze

        # @param [Hash] async_api_server {EventSource::AsyncApi::Server} configuration
        # @param [Hash] options Connection options
        # @option options [Hash] :http (HttpDefaults) key/value pairs of http connection params
        # @option options [Symbol] :adapter (:typheous) the adapter Faraday will use to
        #   connect and process requests
        # @option options [Hash] :request_middleware_params (RequestMiddlewareParamsDefault) key/value pairs for
        #   configuring Faraday request middleware
        # @option options [Hash] :response_middlware_params (ResponseMiddlewareParamsDefault) key/value pairs for
        #   configuring Faraday response middleware
        #
        # @example AsyncAPI Server in YAML format
        #   production:
        #     url: production.example.com
        #     description: Development server
        #     protocol: http
        #     protocolVersion: '1.0.0'
        def initialize(async_api_server, options = {})
          @protocol_version = ProtocolVersion
          @client_version = ClientVersion
          @server = async_api_server
          @connection_params = connection_params_for(options)
          @connection_uri = self.class.connection_uri_for(async_api_server)
          @channel_proxies = {}
          # Eagerly build the Faraday connection so the proxy is usable immediately.
          @subject = build_connection
        end

        # Assemble the underlying {::Faraday::Connection}: install the request and
        # response middleware stacks in their declared :order, plus an error-level
        # logger. The adapter configuration is currently disabled (see commented
        # code below) so Faraday's default adapter is used.
        def build_connection
          request_middleware_params = construct_request_middleware
          response_middleware_params = connection_params[:response_middleware_params]
          # adapter = connection_params[:adapter]

          Faraday.new(
            build_faraday_parameters(connection_params)
          ) do |conn|
            # Request middleware: install in ascending :order (highest importance first).
            request_middleware_params.sort_by do |_k, v|
              v[:order]
            end.each do |middleware, value|
              conn.request middleware.to_sym, value[:options]
            end
            # Response middleware: install in ascending :order.
            response_middleware_params.sort_by do |_k, v|
              v[:order]
            end.each do |middleware, value|
              conn.response middleware.to_sym, value[:options]
            end
            conn.response :logger, nil, { headers: true, bodies: true, log_level: :error }
            # conn.adapter :http # last middleware must be adapter
            # adapter.each_pair do |component, options|
            #   conn.adapter component.to_s.to_sym, options || {}
            # end
          end
        end

        # @return [Faraday::Connection] the wrapped connection instance
        def connection
          @subject
        end

        # Verify connection
        # @return [EventSource::Noop] noop No operation for HTTP service connections
        def start
          # Verify connection:
          #  Network
          #  Authentication
          EventSource::Noop.new
        end

        # The status of the connection instance.
        # A connection with no subject or no channels is treated as active;
        # otherwise it is active when any channel proxy is active.
        def active?
          return true if @subject.blank?
          return true if @channel_proxies.empty?
          @channel_proxies.values.any?(&:active?)
        end

        # Closes the underlying resources and connections. For persistent
        # connections this closes all currently open connections
        def close
          @channel_proxies.values.each(&:close)
        end

        # No-op: HTTP connections are re-established per request.
        def reconnect
          EventSource::Noop.new
        end

        # The version of Faraday client in use
        def client_version
          ClientVersion
        end

        # AsyncAPI HTTP Bindings Protocol version supported by this client
        def protocol_version
          ProtocolVersion
        end

        # @return [Symbol] the protocol this proxy speaks
        def protocol
          :http
        end

        # Create a channel for processing HTTP protocol requests
        # @param [EventSource::AsyncApi::ChannelItem] async_api_channel_item
        #   Channel configuration and bindings
        # @return [FaradayChannelProxy]
        def add_channel(channel_item_key, async_api_channel_item)
          @channel_proxies[channel_item_key] =
            FaradayChannelProxy.new(
              self,
              channel_item_key,
              async_api_channel_item
            )
        end

        # Intentionally empty (returns nil): pairs with {#method_missing} below.
        def respond_to_missing?(name, include_private); end

        # This class applies both the Adapter and Proxy development patterns.
        # It supports the EventSource DSL via the Adapter pattern and serves
        # as Proxy for accessing {::Faraday::Connection} methods
        # @param [String] name the {::Faraday::Connection} method to send a message
        # @param [Mixed] args the message to send to method
        def method_missing(name, *args)
          @subject.send(name, *args)
        end

        class << self
          # Creates unique URI for this connection based on
          # {EventSource::AsyncAPI::Server} configuration values.
          # A bare "/" path is stripped so equivalent URLs normalize identically.
          # @return [String] uri connection key
          def connection_uri_for(async_api_server)
            parsed_url = URI(async_api_server[:url]).normalize
            if parsed_url.path && parsed_url.path == "/"
              parsed_url.to_s.chomp("/")
            else
              parsed_url.to_s
            end
          end
        end

        # Set request_middleware_params
        # @overload request_middleware_params=(values)
        #   @param [Hash] values New values
        # @return [Object] An assignment method, so always returns the RHS
        # NOTE(review): `attribute_keys` is not defined in this class — presumably
        # mixed in or defined elsewhere; verify before relying on this setter.
        def request_middleware_params=(values = nil)
          return unless values.instance_of?(Hash)
          values.symbolize_keys!
          @request_middleware_params =
            values.select do |key, _value|
              attribute_keys.empty? || attribute_keys.include?(key)
            end
        end

        private

        # Collapse the connection params plus SSL options into the hash form
        # that Faraday.new expects.
        def build_faraday_parameters(connection_params)
          http_params = connection_params[:http][:params]
          headers = connection_params[:http][:headers]
          ssl_options = build_ssl_options
          { url: @connection_uri, params: http_params, headers: headers }.merge(ssl_options)
        end

        # Build Faraday's :ssl option hash from the server's client-certificate
        # configuration; returns {} when no client certificate is configured.
        def build_ssl_options
          return {} if @server[:client_certificate].blank?

          client_certificate_options = @server[:client_certificate]
          client_key_password = client_certificate_options[:client_key_password] || ""
          client_certificate =
            OpenSSL::X509::Certificate.new(
              File.read(
                client_certificate_options[:client_certificate]
              )
            )
          client_key_binary = File.read(client_certificate_options[:client_key])
          client_key = OpenSSL::PKey.read(client_key_binary, client_key_password)
          { ssl: { client_key: client_key, client_cert: client_certificate } }
        end

        # Merge caller-supplied options over the class defaults to produce the
        # connection params hash stored in @connection_params.
        def connection_params_for(options)
          request_middleware_params =
            options[:request_middleware_params] || RequestMiddlewareParamsDefault
          response_middleware_params = options[:response_middleware_params]
          response_middleware_params ||=
            json_request? ? JsonResponseMiddlewareParamsDefault : ResponseMiddlewareParamsDefault
          adapter = AdapterDefaults.merge(options[:adapter] || {})
          http = HttpDefaults.merge(options[:http] || {})
          # NOTE(review): the value of options[:content_type] is read here and
          # discarded — looks like dead code or an unfinished feature; confirm intent.
          options[:content_type]

          {
            request_middleware_params: request_middleware_params,
            response_middleware_params: response_middleware_params,
            adapter: adapter
          }.merge http
        end

        # SOAP requests get a fixed middleware stack (retry + SOAP payload header);
        # anything else uses the configured request middleware params.
        def construct_request_middleware
          if soap_request?
            {
              retry: {
                order: 10,
                options: {
                  max: 5,
                  interval: 0.05,
                  interval_randomness: 0.5,
                  backoff_factor: 2
                }
              },
              soap_payload_header: {
                order: 20,
                options: { soap_settings: @server[:soap] }
              }
            }
          else
            connection_params[:request_middleware_params]
          end
        end

        # True when the server's default content type maps to :json.
        def json_request?
          request_content_type.to_s == 'json'
        end

        # True when the server's default content type maps to :soap.
        def soap_request?
          request_content_type.to_s == 'soap'
        end

        # Map the server's default MIME content type to a short symbol;
        # returns nil for unrecognized content types.
        def request_content_type
          case @server[:default_content_type]
          when 'application/json'; :json
          when 'application/soap+xml'
            :soap
          when 'text/xml'
            :xml
          end
        end
      end
    end
  end
end
adequatesystems/crypto-c
src/test/crc32-vectors-cu.c
<filename>src/test/crc32-vectors-cu.c
/* must be declared before includes */
#ifndef CUDA
#define CUDA
#endif

#include <stdint.h>

#include "_assert.h"
#include "../crc32.h"

/* number of test vectors, and the fixed per-vector buffer length */
#define NUMVECTORS 7
#define MAXVECTORLEN 81

/* Test vectors used in RFC 1321 */
static char rfc_1321_vectors[NUMVECTORS][MAXVECTORLEN] = {
   "",
   "a",
   "abc",
   "message digest",
   "abcdefghijklmnopqrstuvwxyz",
   "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
   "1234567890123456789012345678901234567890123456789012345678901234"
   "5678901234567890"
};

/* expected results to test vectors; first entry is 0 (CRC-32 of the empty string) */
static uint32_t expect[NUMVECTORS] = {
   0, 0xe8b7be43, 0x352441c2, 0x20159d7f, 0x4c2750bd, 0x1fc2e6d2,
   0x7ca94a72
};

int main()
{  /* check crc32() digest results match expected */
   size_t inlen[NUMVECTORS];
   uint32_t ret[NUMVECTORS];
   int j;

   /* init memory (synchronous) */
   /* NOTE(review): memset/strlen are used without a direct <string.h> include
    * here — presumably pulled in via _assert.h; confirm. */
   memset(ret, 0, sizeof(ret));
   for (j = 0; j < NUMVECTORS; j++) {
      inlen[j] = strlen(rfc_1321_vectors[j]);
   }

   /* perform bulk hash (CUDA kernel test helper declared by ../crc32.h) */
   test_kcu_crc32(rfc_1321_vectors, inlen, MAXVECTORLEN, ret, NUMVECTORS);

   /* analyze results: every digest must match its RFC 1321 expectation */
   for (j = 0; j < NUMVECTORS; j++) {
      ASSERT_EQ(ret[j], expect[j]);
   }
}
ruby-hyperloop/hyperloop-examples
hyper-operation/smoke_test/app/hyperloop/components/hello.rb
# Demo Hyperloop component: shows a live clock, lets the user broadcast
# messages through two server operations, and renders the received messages.
class Hello < Hyperloop::Component
  # Reactive state: the timestamp displayed in the greeting.
  state time: Time.now

  # After the component mounts, refresh the clock once per second.
  after_mount do
    every(1) { mutate.time Time.now }
  end

  render(DIV) do
    DIV { "Hello! The time is #{state.time}." }
    # Each input/button pair sends its text through a different operation
    # (direct vs. namespace-nested) when clicked.
    DIV { INPUT(id: :message1); BUTTON { "send" }.on(:click) { SendToAll.run(message: Element['#message1'].value) } }
    DIV { INPUT(id: :message2); BUTTON { "send2" }.on(:click) { Operations::NestedSendToAll.run(message: Element['#message2'].value) } }
    if Messages.all.count == 0
      DIV { "No Messages" }
    else
      UL do
        Messages.all.each do |message|
          LI { message }
        end
      end
    end
  end
end