repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
lht19900714/Leetcode_Solutions | Algorithms/0079_Word_Search/Python/Word_Search_Solution_1.py | <gh_stars>0
# Space: O(l)
# Time: O(m * n * l)
class Solution:
    # Space: O(l) recursion depth; Time: O(m * n * 4^l) worst case.
    def exist(self, board, word) -> bool:
        """Return True if `word` can be traced through horizontally/vertically
        adjacent cells of `board`, using each cell at most once.

        board: rectangular 2-D list of single-character strings.
        word:  string to search for.

        Fixes over the original:
        - dfs now returns an explicit bool instead of falling through (None);
        - visited cells are restored even on a successful match, so the
          caller's board is never left mutated;
        - an empty board no longer raises IndexError on `board[0]`.
        """
        if not board or not board[0]:
            return not word  # an empty grid can only contain the empty word
        column_length = len(board)
        row_length = len(board[0])
        word_length = len(word)

        def dfs(x, y, index):
            # All characters matched so far -> word found.
            if index >= word_length:
                return True
            # Out of the grid.
            if not (0 <= x < row_length) or not (0 <= y < column_length):
                return False
            if board[y][x] != word[index]:
                return False
            temp = board[y][x]
            board[y][x] = '*'  # mark visited so this cell cannot be reused
            found = (dfs(x - 1, y, index + 1)
                     or dfs(x + 1, y, index + 1)
                     or dfs(x, y - 1, index + 1)
                     or dfs(x, y + 1, index + 1))
            board[y][x] = temp  # always restore before unwinding
            return found

        return any(dfs(row, column, 0)
                   for column in range(column_length)
                   for row in range(row_length))
|
hcfman/hydracontrolfreak | src/main/java/com/hydracontrolfreak/hcf/streamer/Img.java | <filename>src/main/java/com/hydracontrolfreak/hcf/streamer/Img.java
package com.hydracontrolfreak.hcf.streamer;
import com.hydracontrolfreak.hcf.freak.Freak;
import com.hydracontrolfreak.hcf.freak.api.FreakApi;
import com.hydracontrolfreak.hcf.hcfdevice.config.HcfDeviceConfig;
import org.apache.log4j.Logger;
import org.springframework.web.bind.annotation.RequestMapping;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.Date;
@WebServlet(urlPatterns={"/img"})
public class Img extends HttpServlet {
    private static final long serialVersionUID = 5019870886557760386L;
    private static final Logger logger = Logger.getLogger(Img.class);

    // NOTE(review): appears unused in this class — confirm whether it is
    // injected/used elsewhere before removing.
    HcfDeviceConfig hcfConfig;
    // Lazily resolved on first request; Freak may not be ready at init time.
    private FreakApi freak;

    @Override
    public void init() throws ServletException {
        // Intentionally empty: freak is resolved lazily in service().
    }

    /** Writes a plain-text error message and finishes the response. */
    private void abort(final HttpServletResponse response, final String message) throws ServletException, IOException {
        response.setContentType("text/plain");
        final PrintWriter out = response.getWriter();
        out.println(message);
        out.close();
    }

    /**
     * Serves a single DVR JPEG frame.
     *
     * Request parameters:
     *   t   - capture timestamp in milliseconds (digits only)
     *   cam - camera id (digits only; regex-validated, so no path traversal)
     *   n   - frame number within the capture, 1..9999
     *
     * Fixes over the original:
     * - the trailing "\r\n" written after the image exceeded the declared
     *   Content-Length (an MJPEG-streaming leftover) and could confuse strict
     *   clients — removed, together with the 200 ms throttling sleep;
     * - the FileInputStream is now closed via try-with-resources even when
     *   reading throws;
     * - the file is stat'ed once instead of creating two File objects.
     */
    @Override
    protected void service(final HttpServletRequest request,
                           final HttpServletResponse response) throws ServletException, IOException {
        if (freak == null)
            freak = Freak.getInstance();
        if (!freak.isReady())
            return;
        final String t = request.getParameter("t");
        final String n = request.getParameter("n");
        final String cam = request.getParameter("cam");
        if (logger.isDebugEnabled()) logger.debug("Cam: " + cam);
        if (t == null) {
            abort(response, "Parameter t is missing");
            return;
        }
        if (!t.matches("^\\d+$")) {
            abort(response, "Parameter t should be a number");
            return;
        }
        final long tValue = Long.parseLong(t);
        if (cam == null) {
            abort(response, "Parameter cam is missing");
            return;
        }
        if (!cam.matches("^\\d+$")) {
            abort(response, "Parameter cam should be a number");
            return;
        }
        if (n == null) {
            abort(response, "Parameter n is missing");
            return;
        }
        if (!n.matches("^\\d+$")) {
            abort(response, "Parameter n should be a number");
            return;
        }
        final int nValue = Integer.parseInt(n);
        if (nValue < 1 || nValue > 9999) {
            abort(response, "Parameter n should be >= 1 & <= 9999");
            return;
        }
        final Date d = new Date(tValue);
        // NOTE(review): SimpleDateFormat uses the JVM default timezone here;
        // confirm the recorder writes day directories in the same zone.
        final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        final String dayString = sdf.format(d);
        final String filename = freak.getHcfBase() + "/disk/hcf/dvr_images/" + cam + "/" + dayString + "/" + t + "/" + String.format("%04d", nValue) + ".jpg";
        if (logger.isDebugEnabled()) logger.debug("Filename: " + filename);
        final File imageFile = new File(filename);
        if (!imageFile.exists()) {
            abort(response, "Not found");
            return;
        }
        response.setContentType("image/jpeg");
        final int totalSize = (int) imageFile.length();
        response.setContentLength(totalSize);
        final byte[] buffer = new byte[totalSize];
        // try-with-resources: previously the stream leaked if read() threw.
        try (final FileInputStream imageStream = new FileInputStream(imageFile)) {
            int offset = 0;
            int bytesRead;
            while (offset != totalSize
                    && (bytesRead = imageStream.read(buffer, offset, totalSize - offset)) > 0)
                offset += bytesRead;
        }
        final OutputStream outStream = response.getOutputStream();
        outStream.write(buffer);
        outStream.close();
    }
}
|
sunjc53yy/chromium | chromecast/browser/test/chromecast_shell_browser_test.cc | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/macros.h"
#include "chromecast/browser/test/chromecast_browser_test.h"
#include "url/gurl.h"
#include "url/url_constants.h"
namespace chromecast {
namespace shell {
class ChromecastShellBrowserTest : public ChromecastBrowserTest {
public:
ChromecastShellBrowserTest() : url_(url::kAboutBlankURL) {}
virtual void SetUpOnMainThread() override {
CreateBrowser();
NavigateToURL(web_contents(), url_);
}
private:
const GURL url_;
DISALLOW_COPY_AND_ASSIGN(ChromecastShellBrowserTest);
};
// Smoke test: starting up and tearing down the fixture exercises a full
// browser lifecycle; the assertion itself is a placeholder.
IN_PROC_BROWSER_TEST_F(ChromecastShellBrowserTest, EmptyTest) {
  // Run an entire browser lifecycle to ensure nothing breaks.
  // TODO(gunsch): Remove this test case once there are actual assertions to
  // test in a ChromecastBrowserTest instance.
  EXPECT_TRUE(true);
}
} // namespace shell
} // namespace chromecast
|
friedlwo/AppWoksUtils | src/org/appwork/utils/swing/dialog/SearchDialog.java | /**
* Copyright (c) 2009 - 2010 AppWork UG(haftungsbeschränkt) <<EMAIL>>
*
* This file is part of org.appwork.utils.swing.dialog
*
* This software is licensed under the Artistic License 2.0,
* see the LICENSE file or http://www.opensource.org/licenses/artistic-license-2.0.php
* for details
*/
package org.appwork.utils.swing.dialog;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import javax.swing.BorderFactory;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.JTextPane;
import javax.swing.text.JTextComponent;
import net.miginfocom.swing.MigLayout;
import org.appwork.resources.AWUTheme;
import org.appwork.storage.JSonStorage;
import org.appwork.uio.UIOManager;
import org.appwork.utils.locale._AWU;
/**
 * Modal "find" dialog: a message, a single-line search input and two
 * options ("case sensitive", "regular expression") whose state is
 * persisted in the "SearchDialog" JSON storage between sessions.
 * The dialog's return value is the entered search string, or null if the
 * dialog was not confirmed or the input was empty.
 */
public class SearchDialog extends AbstractDialog<String> implements KeyListener, MouseListener {

    /** Message text shown above the input field. */
    private final String message;
    private JTextPane messageArea;
    private JTextComponent input;
    // Option checkboxes; selection state survives restarts via JSonStorage.
    private final JCheckBox caseSensitive;
    private final JCheckBox regularExpression;

    /**
     * @param flag    dialog flags; BUTTONS_HIDE_CANCEL is always OR-ed in,
     *                so only the "find" button is shown
     * @param title   window title
     * @param message text displayed above the search input
     */
    public SearchDialog(final int flag, final String title, final String message) {
        super(flag | UIOManager.BUTTONS_HIDE_CANCEL, title, AWUTheme.I().getIcon("dialog/find", 32), _AWU.T.SEARCHDIALOG_BUTTON_FIND(), null);
        caseSensitive = new JCheckBox(_AWU.T.SEARCHDIALOG_CHECKBOX_CASESENSITIVE());
        regularExpression = new JCheckBox(_AWU.T.SEARCHDIALOG_CHECKBOX_REGULAREXPRESSION());
        // Restore the last used options (default: both off).
        caseSensitive.setSelected(JSonStorage.getStorage("SearchDialog").get("caseSensitive", false));
        regularExpression.setSelected(JSonStorage.getStorage("SearchDialog").get("regularExpression", false));
        this.message = message;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.appwork.utils.swing.dialog.AbstractDialog#getRetValue()
     */
    @Override
    protected String createReturnValue() {
        return getReturnID();
    }

    /**
     * Returns the search string, or null when the dialog was neither
     * confirmed (OK) nor timed out, or when the input is empty.
     * As a side effect, persists the current checkbox states.
     */
    public String getReturnID() {
        if ((getReturnmask() & (Dialog.RETURN_OK | Dialog.RETURN_TIMEOUT)) == 0) { return null; }
        if (input.getText() == null || input.getText().equals("")) { return null; }
        try {
            JSonStorage.getStorage("SearchDialog").put("caseSensitive", caseSensitive.isSelected());
            JSonStorage.getStorage("SearchDialog").put("regularExpression", regularExpression.isSelected());
        } catch (final Exception e) {
            // Persisting the options is best-effort; the search string is still returned.
            org.appwork.utils.logging.Log.exception(e);
        }
        return input.getText();
    }

    /** @return true if the "case sensitive" option is selected */
    public boolean isCaseSensitive() {
        return caseSensitive.isSelected();
    }

    /** @return true if the "regular expression" option is selected */
    public boolean isRegex() {
        return regularExpression.isSelected();
    }

    // NOTE(review): presumably cancels the dialog's auto-close countdown as
    // soon as the user interacts with the input — confirm against
    // AbstractDialog#cancel().
    public void keyPressed(final KeyEvent e) {
        cancel();
    }

    public void keyReleased(final KeyEvent e) {
    }

    public void keyTyped(final KeyEvent e) {
    }

    /** Builds the dialog content: message pane, input field and the two option checkboxes. */
    @Override
    public JComponent layoutDialogContent() {
        final JPanel contentpane = new JPanel(new MigLayout("ins 0,wrap 1", "[fill,grow]"));
        messageArea = new JTextPane();
        messageArea.setBorder(null);
        messageArea.setBackground(null);
        messageArea.setOpaque(false);
        messageArea.setText(message);
        messageArea.setEditable(false);
        // Synthetica L&F hint to render the pane transparently.
        messageArea.putClientProperty("Synthetica.opaque", Boolean.FALSE);
        contentpane.add(messageArea);
        input = new JTextField();
        input.setBorder(BorderFactory.createEtchedBorder());
        input.addKeyListener(this);
        input.addMouseListener(this);
        contentpane.add(input, "pushy,growy");
        contentpane.add(regularExpression, "split 2, alignx right, pushx");
        contentpane.add(caseSensitive, " alignx right");
        return contentpane;
    }

    // See note on keyPressed: user interaction presumably stops the countdown.
    public void mouseClicked(final MouseEvent e) {
        cancel();
    }

    public void mouseEntered(final MouseEvent e) {
    }

    public void mouseExited(final MouseEvent e) {
    }

    public void mousePressed(final MouseEvent e) {
    }

    public void mouseReleased(final MouseEvent e) {
    }

    /** Preselects and focuses the search input when the dialog opens. */
    @Override
    protected void initFocus(final JComponent focus) {
        input.selectAll();
        input.requestFocusInWindow();
    }
}
|
qiyesmart/Java | CrateThread1/CreateThread.java | /**
* Author:QiyeSmart
* Created:2019/4/24
*/
/*
线程的创建:
1.
1.1 自定义类继承java.lang.Thread类,覆写run方法
1.2 实例化自定义类对象,该对象就是一个具备线程执行的对象
1.3 线程类的对象run方法直接调用和普通类的对象调用没有区别
1.3.1直接调用run方法,实际还是同步执行,跟线程没关系
1.3.2 调用start方法,才会异步执行,跟线程有关系,该方法不能多次调用
1.4 native修饰的方法称为本地方法,需要依赖平台实现 JNI(Java Native Interface)
2.Runnable类实现(业务无返回结果)
2.1 自定义类实现java.lang.Runnable接口,实现run方法
2.2 实例化Thread类对象,通过构造方法传入Runnable接口的实现类的实例化对象或者一个Lambda表达式
2.3 线程的启动方式start
3.Callable类实现(业务有返回结果):
3.1 自定义类实现java.util.concurrent.Callable接口,实现call方法
3.2 FutureTask 包装 Callable接口实现类的对象
3.3 实例化Thread类对象,通过构造方法传入FutureTask对象
3.4 通过Thread对象的start方法启动线程
3.5 通过FutureTask对象的get方法获取结果(阻塞方法)
继承Thread vs 实现Runnable/Callable
继承Thread:
+ 单继承缺陷
+ 业务逻辑和线程逻辑耦合
实现Runnable
+ 解决Thread的继承缺陷,实现多接口
+ 业务逻辑和线程逻辑无关
+ 创建Thread对象,传入业务对象
+ 业务逻辑能够复用
4. 线程方法
4.1 线程名称
4.1.1线程创建建议设置一个简明思议的名称(表示线程的功能)
4.1.2 获取当前线程Thread.currentThread()
4.1.3 获取线程名称thread.getName()
4.1.4 线程名称:普通线程名称 thread-index , 主线程名称:main
4.1.5 主方法本身就是一个线程,所有的用户线程都是通过主线程创建并启动的,用户线程中也可以创建其它线程。
4.1.6 Java程序启动,创建JVM进程,创建main线程,JVM进程中至少有一个线程(实际上JVM启动之后除了main线程外,还有其它线程,比如垃圾回收线程)
4.1.7 Java中线程是最小的执行单元,JVM进程线程不存在了,JVM进程退出
*/
|
Scripta-Qumranica-Electronica/Scrollery-website | src/js/components/editor/sign_attributes.js | export default {
readability: {
incomplete: {
clear: 'INCOMPLETE_BUT_CLEAR',
unclear: 'INCOMPLETE_AND_NOT_CLEAR'
}
}
}
|
LoliGothick/mitama-dimensional | include/mitama/dimensional/systems/si/base_dimension/equivalent_dose.hpp | <reponame>LoliGothick/mitama-dimensional
// SI base-dimension tag for equivalent dose (the sievert) used by the
// mitama-dimensional unit system.
#ifndef MITAMA_DIMENSIONAL_DERIVED_UNITS_EQUIVALENT_DOSE_HPP
#define MITAMA_DIMENSIONAL_DERIVED_UNITS_EQUIVALENT_DOSE_HPP

namespace mitama::systems::si {
// Empty marker type: the `is_base_dimension` member typedef tags this struct
// as a base dimension for the library's dimension machinery.
struct equivalent_dose { using is_base_dimension = void; };
}

#endif
|
of13tch/siembol | config-editor/config-editor-services/src/main/java/uk/co/gresearch/siembol/configeditor/service/alerts/AlertingRuleSchemaService.java | <gh_stars>100-1000
package uk.co.gresearch.siembol.configeditor.service.alerts;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.co.gresearch.siembol.common.model.AlertingStormAttributesDto;
import uk.co.gresearch.siembol.common.jsonschema.SiembolJsonSchemaValidator;
import uk.co.gresearch.siembol.configeditor.common.ConfigImporter;
import uk.co.gresearch.siembol.configeditor.model.ConfigEditorAttributes;
import uk.co.gresearch.siembol.configeditor.model.ConfigEditorResult;
import uk.co.gresearch.siembol.configeditor.common.ConfigEditorUtils;
import uk.co.gresearch.siembol.configeditor.common.ConfigSchemaService;
import uk.co.gresearch.siembol.alerts.common.AlertingAttributes;
import uk.co.gresearch.siembol.alerts.common.AlertingResult;
import uk.co.gresearch.siembol.alerts.compiler.AlertingCompiler;
import uk.co.gresearch.siembol.alerts.compiler.AlertingCorrelationRulesCompiler;
import uk.co.gresearch.siembol.alerts.compiler.AlertingRulesCompiler;
import uk.co.gresearch.siembol.configeditor.model.ConfigEditorUiLayout;
import uk.co.gresearch.siembol.configeditor.service.alerts.sigma.SigmaRuleImporter;
import uk.co.gresearch.siembol.configeditor.service.common.ConfigSchemaServiceAbstract;
import uk.co.gresearch.siembol.configeditor.service.common.ConfigSchemaServiceContext;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import static uk.co.gresearch.siembol.configeditor.model.ConfigEditorResult.StatusCode.OK;
/**
 * Config-editor schema service for Siembol alerting rules (and, via the
 * second factory, alerting correlation rules). Wraps an {@link AlertingCompiler}
 * to validate/test rules and exposes the JSON schemas the UI needs.
 */
public class AlertingRuleSchemaService extends ConfigSchemaServiceAbstract {
    private static final Logger LOG = LoggerFactory
            .getLogger(MethodHandles.lookup().lookupClass());
    private static final ObjectReader TEST_SPECIFICATION_READER = new ObjectMapper()
            .readerFor(AlertingTestSpecificationDto.class);
    private static final ObjectWriter ALERTING_ATTRIBUTES_WRITER = new ObjectMapper()
            .setSerializationInclusion(JsonInclude.Include.NON_NULL)
            .writerFor(AlertingAttributes.class)
            .with(SerializationFeature.INDENT_OUTPUT);
    private static final ObjectReader ADMIN_CONFIG_READER = new ObjectMapper()
            .readerFor(AlertingStormAttributesDto.class);
    private static final String SCHEMA_INIT_ERROR = "Error during computing rules schema";
    private static final String TESTING_ERROR = "Unexpected rule testing service result";
    private static final String SIGMA_IMPORTER_NAME = "sigma";

    private final AlertingCompiler alertingCompiler;

    AlertingRuleSchemaService(AlertingCompiler alertingCompiler,
                              ConfigSchemaServiceContext context) {
        super(context);
        this.alertingCompiler = alertingCompiler;
    }

    /** Validates a single rule with the underlying compiler. */
    @Override
    public ConfigEditorResult validateConfiguration(String rule) {
        AlertingResult alertingResult = alertingCompiler.validateRule(rule);
        return fromAlertingValidateResult(alertingResult);
    }

    /** Validates a complete release of rules with the underlying compiler. */
    @Override
    public ConfigEditorResult validateConfigurations(String rules) {
        AlertingResult alertingResult = alertingCompiler.validateRules(rules);
        return fromAlertingValidateResult(alertingResult);
    }

    /**
     * Fetches the raw rules schema from the compiler, failing fast when the
     * compiler reports an error, the schema is missing or the UI layout is null.
     * Shared by both factory methods (previously duplicated inline).
     */
    private static String rulesSchemaOrThrow(AlertingCompiler compiler, ConfigEditorUiLayout uiLayout) {
        AlertingResult schemaResult = compiler.getSchema();
        if (schemaResult.getStatusCode() != AlertingResult.StatusCode.OK
                || schemaResult.getAttributes().getRulesSchema() == null
                || uiLayout == null) {
            LOG.error(SCHEMA_INIT_ERROR);
            throw new IllegalStateException(SCHEMA_INIT_ERROR);
        }
        return schemaResult.getAttributes().getRulesSchema();
    }

    /** Unwraps a patched schema, treating an absent value as an initialisation failure. */
    private static String presentOrThrow(Optional<String> schema) {
        if (!schema.isPresent()) {
            LOG.error(SCHEMA_INIT_ERROR);
            throw new IllegalStateException(SCHEMA_INIT_ERROR);
        }
        return schema.get();
    }

    /**
     * Creates the schema service for alerting rules, including the test-spec
     * schema and the sigma rule importer.
     */
    public static ConfigSchemaService createAlertingRuleSchemaService(ConfigEditorUiLayout uiLayout) throws Exception {
        LOG.info("Initialising alerts rule schema service");
        ConfigSchemaServiceContext context = new ConfigSchemaServiceContext();
        AlertingCompiler compiler = AlertingRulesCompiler.createAlertingRulesCompiler();
        String configSchema = presentOrThrow(ConfigEditorUtils
                .patchJsonSchema(rulesSchemaOrThrow(compiler, uiLayout), uiLayout.getConfigLayout()));

        SiembolJsonSchemaValidator testValidator = new SiembolJsonSchemaValidator(AlertingTestSpecificationDto.class);
        String testSchema = testValidator.getJsonSchema().getAttributes().getJsonSchema();
        SiembolJsonSchemaValidator adminConfigValidator = new SiembolJsonSchemaValidator(AlertingStormAttributesDto.class);
        String adminConfigSchema = presentOrThrow(ConfigEditorUtils.patchJsonSchema(
                adminConfigValidator.getJsonSchema().getAttributes().getJsonSchema(),
                uiLayout.getAdminConfigLayout()));
        // NOTE(review): as in the original code, the *unpatched* test schema is
        // stored in the context; the patched variant is only checked for
        // presence. Confirm whether the patched schema should be stored instead.
        presentOrThrow(ConfigEditorUtils.patchJsonSchema(testSchema, uiLayout.getTestLayout()));

        context.setConfigSchema(configSchema);
        context.setAdminConfigSchema(adminConfigSchema);
        context.setAdminConfigValidator(adminConfigValidator);
        context.setTestSchema(testSchema);

        Map<String, ConfigImporter> importerMap = new HashMap<>();
        importerMap.put(SIGMA_IMPORTER_NAME, new SigmaRuleImporter.Builder().configEditorUiLayout(uiLayout).build());
        context.setConfigImporters(importerMap);
        LOG.info("Initialising alerts rule schema service completed");
        return new AlertingRuleSchemaService(compiler, context);
    }

    /**
     * Creates the schema service for alerting correlation rules (no test
     * schema and no importers, unlike the plain rules service).
     */
    public static ConfigSchemaService createAlertingCorrelationRuleSchemaService(
            ConfigEditorUiLayout uiLayout) throws Exception {
        LOG.info("Initialising alerts correlation rule schema service");
        ConfigSchemaServiceContext context = new ConfigSchemaServiceContext();
        AlertingCompiler compiler = AlertingCorrelationRulesCompiler.createAlertingCorrelationRulesCompiler();
        String configSchema = presentOrThrow(ConfigEditorUtils
                .patchJsonSchema(rulesSchemaOrThrow(compiler, uiLayout), uiLayout.getConfigLayout()));

        SiembolJsonSchemaValidator adminConfigValidator = new SiembolJsonSchemaValidator(AlertingStormAttributesDto.class);
        String adminConfigSchema = presentOrThrow(ConfigEditorUtils.patchJsonSchema(
                adminConfigValidator.getJsonSchema().getAttributes().getJsonSchema(),
                uiLayout.getAdminConfigLayout()));

        context.setConfigSchema(configSchema);
        context.setAdminConfigSchema(adminConfigSchema);
        context.setAdminConfigValidator(adminConfigValidator);
        LOG.info("Initialising alerts correlation rule schema service completed");
        return new AlertingRuleSchemaService(compiler, context);
    }

    /** Tests a single rule against the event given in the test specification JSON. */
    @Override
    public ConfigEditorResult testConfiguration(String rule, String testSpecification) {
        AlertingTestSpecificationDto specificationDto;
        try {
            specificationDto = TEST_SPECIFICATION_READER.readValue(testSpecification);
        } catch (IOException e) {
            return ConfigEditorResult.fromException(e);
        }
        return fromAlertingTestResult(alertingCompiler.testRule(rule, specificationDto.getEventContent()));
    }

    /** Tests a rules release against the event given in the test specification JSON. */
    @Override
    public ConfigEditorResult testConfigurations(String rule, String testSpecification) {
        AlertingTestSpecificationDto specificationDto;
        try {
            specificationDto = TEST_SPECIFICATION_READER.readValue(testSpecification);
        } catch (IOException e) {
            return ConfigEditorResult.fromException(e);
        }
        return fromAlertingTestResult(alertingCompiler.testRules(rule, specificationDto.getEventContent()));
    }

    /** Extracts the storm topology name from an admin configuration JSON. */
    @Override
    public ConfigEditorResult getAdminConfigTopologyName(String configuration) {
        try {
            AlertingStormAttributesDto adminConfig = ADMIN_CONFIG_READER.readValue(configuration);
            ConfigEditorAttributes attributes = new ConfigEditorAttributes();
            attributes.setTopologyName(adminConfig.getTopologyName());
            return new ConfigEditorResult(OK, attributes);
        } catch (IOException e) {
            return ConfigEditorResult.fromException(e);
        }
    }

    /** Maps a compiler validation result onto a config-editor result (OK or ERROR). */
    private ConfigEditorResult fromAlertingValidateResult(AlertingResult alertingResult) {
        ConfigEditorAttributes attr = new ConfigEditorAttributes();
        ConfigEditorResult.StatusCode statusCode = alertingResult.getStatusCode() == AlertingResult.StatusCode.OK
                ? OK
                : ConfigEditorResult.StatusCode.ERROR;
        attr.setMessage(alertingResult.getAttributes().getMessage());
        attr.setException(alertingResult.getAttributes().getException());
        return new ConfigEditorResult(statusCode, attr);
    }

    /**
     * Maps a compiler test result onto a config-editor result, serialising
     * output/exception events as the raw test output.
     */
    private ConfigEditorResult fromAlertingTestResult(AlertingResult alertingResult) {
        ConfigEditorAttributes attr = new ConfigEditorAttributes();
        if (alertingResult.getStatusCode() != AlertingResult.StatusCode.OK) {
            attr.setMessage(alertingResult.getAttributes().getMessage());
            attr.setException(alertingResult.getAttributes().getException());
            return new ConfigEditorResult(ConfigEditorResult.StatusCode.ERROR, attr);
        }
        if (alertingResult.getAttributes().getMessage() == null) {
            // OK status but no message means the testing service misbehaved.
            return ConfigEditorResult.fromMessage(ConfigEditorResult.StatusCode.ERROR,
                    TESTING_ERROR);
        }
        attr.setTestResultComplete(true);
        attr.setTestResultOutput(alertingResult.getAttributes().getMessage());
        AlertingAttributes alertingAttributes = new AlertingAttributes();
        alertingAttributes.setOutputEvents(alertingResult.getAttributes().getOutputEvents());
        alertingAttributes.setExceptionEvents(alertingResult.getAttributes().getExceptionEvents());
        try {
            String rawTestOutput = ALERTING_ATTRIBUTES_WRITER.writeValueAsString(alertingAttributes);
            attr.setTestResultRawOutput(rawTestOutput);
        } catch (JsonProcessingException e) {
            return ConfigEditorResult.fromException(e);
        }
        return new ConfigEditorResult(OK, attr);
    }
}
|
hieuhunter/frontend-dienthoai-admin | src/modules/main/components/tag/components/create/components/index.js | import React from 'react';
import { useFormik } from 'formik';
import * as Yup from 'yup';
import httpRequest from 'common/utils/httpRequest';
import Card from 'common/components/Card/components';
import Breadcrumb from 'common/components/Breadcrumb/components';
import history from 'common/utils/history';
import classNames from 'classnames';
import { useSelector } from 'react-redux';
const CreateTagComponent = () => {
const auth = useSelector((state) => state.appAuth.current);
const formik = useFormik({
initialValues: {
title: '',
slug: '',
content: ''
},
validationSchema: Yup.object({
title: Yup.string().required('Title is required').max(166, 'Title is maximum 166 characters'),
slug: Yup.string().max(200, 'Slug is maximum 200 characters'),
content: Yup.string().required('Content is required').max(200, 'Content is maximum message characters')
}),
onSubmit: (values, { setSubmitting, setErrors }) => {
httpRequest
.post({
url: `/tags`,
token: auth.token.access_token,
data: {
title: values.title,
slug: values.slug,
content: values.content
}
})
.then((response) => {
if (!response.data.success) {
console.log('Error');
}
history.push(`/main/tags`);
})
.catch((error) => {
console.log(error);
})
.finally(() => {
setSubmitting(false);
});
}
});
return (
<>
<div className="content-header py-3">
<Breadcrumb>Create tag</Breadcrumb>
</div>
<div className="content-body">
<Card header="Create tag">
<form onSubmit={formik.handleSubmit} className="row g-3">
<div className="col-md-6">
<label htmlFor="title" className="form-label">
Title <span className="text-danger">*</span>
</label>
<input
type="text"
placeholder="Enter title"
className={classNames('form-control', {
'is-invalid': formik.errors.title && formik.touched.title
})}
onChange={formik.handleChange}
onBlur={formik.handleBlur}
value={formik.values.title}
name="title"
id="title"
/>
{formik.errors.title && formik.touched.title && <div className="invalid-feedback">{formik.errors.title}</div>}
</div>
<div className="col-md-6">
<label htmlFor="slug" className="form-label">
Slug
</label>
<input
type="text"
placeholder="Enter slug"
className={classNames('form-control', {
'is-invalid': formik.errors.slug && formik.touched.slug
})}
onChange={formik.handleChange}
onBlur={formik.handleBlur}
value={formik.values.slug}
name="slug"
id="slug"
/>
{formik.errors.slug && formik.touched.slug && <div className="invalid-feedback">{formik.errors.slug}</div>}
</div>
<div className="col-md-12">
<label htmlFor="content" className="form-label">
Content <span className="text-danger">*</span>
</label>
<textarea
rows="3"
placeholder="Enter content"
className={classNames('form-control', {
'is-invalid': formik.errors.content && formik.touched.content
})}
onChange={formik.handleChange}
onBlur={formik.handleBlur}
value={formik.values.content}
name="content"
id="content"
/>
{formik.errors.content && formik.touched.content && <div className="invalid-feedback">{formik.errors.content}</div>}
</div>
<div className="col-md-12">
<button className="btn btn-primary" type="submit" disabled={formik.isSubmitting}>
{formik.isSubmitting ? 'Submitting' : 'Submit'}
</button>
</div>
</form>
</Card>
</div>
</>
);
};
export default CreateTagComponent;
|
rahuldeepattri/app-autoscaler | src/integration_legacy/integration_api_scheduler_test.go | <reponame>rahuldeepattri/app-autoscaler
package integration_legacy
import (
"autoscaler/cf"
"encoding/base64"
"fmt"
"net/http"
"strings"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/onsi/gomega/ghttp"
)
var _ = Describe("Integration_legacy_Api_Scheduler", func() {
var (
appId string
policyStr []byte
initInstanceCount int = 2
serviceInstanceId string
bindingId string
orgId string
spaceId string
)
BeforeEach(func() {
startFakeCCNOAAUAA(initInstanceCount)
initializeHttpClient("api.crt", "api.key", "autoscaler-ca.crt", apiSchedulerHttpRequestTimeout)
initializeHttpClientForPublicApi("api_public.crt", "api_public.key", "autoscaler-ca.crt", apiMetricsCollectorHttpRequestTimeout)
schedulerConfPath = components.PrepareSchedulerConfig(dbUrl, fmt.Sprintf("https://127.0.0.1:%d", components.Ports[ScalingEngine]), tmpDir, defaultHttpClientTimeout)
startScheduler()
serviceBrokerConfPath = components.PrepareServiceBrokerConfig(components.Ports[ServiceBroker], components.Ports[ServiceBrokerInternal], brokerUserName, brokerPassword, false, dbUrl, fmt.Sprintf("https://127.0.0.1:%d", components.Ports[APIServer]), brokerApiHttpRequestTimeout, tmpDir)
startServiceBroker()
serviceInstanceId = getRandomId()
orgId = getRandomId()
spaceId = getRandomId()
bindingId = getRandomId()
appId = getRandomId()
brokerAuth = base64.StdEncoding.EncodeToString([]byte("username:password"))
})
AfterEach(func() {
stopServiceBroker()
stopScheduler()
})
Describe("When offered as a service", func() {
BeforeEach(func() {
apiServerConfPath = components.PrepareApiServerConfig(components.Ports[APIServer], components.Ports[APIPublicServer], false, 200, fakeCCNOAAUAA.URL(), dbUrl, fmt.Sprintf("https://127.0.0.1:%d", components.Ports[Scheduler]), fmt.Sprintf("https://127.0.0.1:%d", components.Ports[ScalingEngine]), fmt.Sprintf("https://127.0.0.1:%d", components.Ports[MetricsCollector]), fmt.Sprintf("https://127.0.0.1:%d", components.Ports[EventGenerator]), fmt.Sprintf("https://127.0.0.1:%d", components.Ports[ServiceBrokerInternal]), true, defaultHttpClientTimeout, 30, 30, tmpDir)
startApiServer()
resp, err := detachPolicy(appId, components.Ports[APIPublicServer], httpClientForPublicApi)
Expect(err).NotTo(HaveOccurred())
resp.Body.Close()
})
AfterEach(func() {
stopApiServer()
})
Context("Cloud Controller api is not available", func() {
BeforeEach(func() {
fakeCCNOAAUAA.Reset()
fakeCCNOAAUAA.AllowUnhandledRequests = true
})
Context("Create policy", func() {
It("should error with status code 500", func() {
By("check public api")
policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
doAttachPolicy(appId, policyStr, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
checkApiServerStatus(appId, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
})
})
Context("Delete policy", func() {
BeforeEach(func() {
policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
doAttachPolicy(appId, policyStr, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
})
It("should error with status code 500", func() {
doDetachPolicy(appId, http.StatusInternalServerError, "", components.Ports[APIPublicServer], httpClientForPublicApi)
checkApiServerStatus(appId, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
})
})
})
Context("UAA api is not available", func() {
BeforeEach(func() {
fakeCCNOAAUAA.Reset()
fakeCCNOAAUAA.AllowUnhandledRequests = true
fakeCCNOAAUAA.RouteToHandler("GET", "/v2/info", ghttp.RespondWithJSONEncoded(http.StatusOK,
cf.Endpoints{
TokenEndpoint: fakeCCNOAAUAA.URL(),
DopplerEndpoint: strings.Replace(fakeCCNOAAUAA.URL(), "http", "ws", 1),
}))
})
Context("Create policy", func() {
It("should error with status code 500", func() {
By("check public api")
policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
doAttachPolicy(appId, policyStr, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
checkApiServerStatus(appId, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
})
})
Context("Delete policy", func() {
BeforeEach(func() {
policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
doAttachPolicy(appId, policyStr, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
})
It("should error with status code 500", func() {
doDetachPolicy(appId, http.StatusInternalServerError, "", components.Ports[APIPublicServer], httpClientForPublicApi)
checkApiServerStatus(appId, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
})
})
})
Context("UAA api returns 401", func() {
BeforeEach(func() {
fakeCCNOAAUAA.Reset()
fakeCCNOAAUAA.AllowUnhandledRequests = true
fakeCCNOAAUAA.RouteToHandler("GET", "/v2/info", ghttp.RespondWithJSONEncoded(http.StatusOK,
cf.Endpoints{
TokenEndpoint: fakeCCNOAAUAA.URL(),
DopplerEndpoint: strings.Replace(fakeCCNOAAUAA.URL(), "http", "ws", 1),
}))
fakeCCNOAAUAA.RouteToHandler("POST", "/check_token", ghttp.RespondWithJSONEncoded(http.StatusOK,
struct {
Scope []string `json:"scope"`
}{
[]string{"cloud_controller.read", "cloud_controller.write", "password.write", "openid", "network.admin", "network.write", "uaa.user"},
}))
fakeCCNOAAUAA.RouteToHandler("GET", "/userinfo", ghttp.RespondWithJSONEncoded(http.StatusUnauthorized, struct{}{}))
})
Context("Create policy", func() {
It("should error with status code 401", func() {
By("check public api")
policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
doAttachPolicy(appId, policyStr, http.StatusUnauthorized, components.Ports[APIPublicServer], httpClientForPublicApi)
checkApiServerStatus(appId, http.StatusUnauthorized, components.Ports[APIPublicServer], httpClientForPublicApi)
})
})
Context("Delete policy", func() {
BeforeEach(func() {
policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
doAttachPolicy(appId, policyStr, http.StatusUnauthorized, components.Ports[APIPublicServer], httpClientForPublicApi)
})
It("should error with status code 401", func() {
doDetachPolicy(appId, http.StatusUnauthorized, "", components.Ports[APIPublicServer], httpClientForPublicApi)
checkApiServerStatus(appId, http.StatusUnauthorized, components.Ports[APIPublicServer], httpClientForPublicApi)
})
})
})
// When the Cloud Controller space-developer check returns zero matches, the
// user has no permission on the app, so policy operations must return 401.
Context("Check permission not passed", func() {
	BeforeEach(func() {
		// total_results == 0 means the user is not a developer of the
		// app's space.
		fakeCCNOAAUAA.RouteToHandler("GET", checkUserSpaceRegPath, ghttp.RespondWithJSONEncoded(http.StatusOK,
			struct {
				TotalResults int `json:"total_results"`
			}{
				0,
			}))
	})
	Context("Create policy", func() {
		It("should error with status code 401", func() {
			By("check public api")
			policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
			doAttachPolicy(appId, policyStr, http.StatusUnauthorized, components.Ports[APIPublicServer], httpClientForPublicApi)
			checkApiServerStatus(appId, http.StatusUnauthorized, components.Ports[APIPublicServer], httpClientForPublicApi)
		})
	})
	Context("Delete policy", func() {
		BeforeEach(func() {
			// The attach is itself rejected with 401; no policy is stored.
			policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
			doAttachPolicy(appId, policyStr, http.StatusUnauthorized, components.Ports[APIPublicServer], httpClientForPublicApi)
		})
		It("should error with status code 401", func() {
			doDetachPolicy(appId, http.StatusUnauthorized, "", components.Ports[APIPublicServer], httpClientForPublicApi)
			checkApiServerStatus(appId, http.StatusUnauthorized, components.Ports[APIPublicServer], httpClientForPublicApi)
		})
	})
})
// With the scheduler stopped, policy writes that involve schedules must fail
// with 500 and leave no policy behind (attach rolls back; detach removes the
// policy but reports the scheduler failure).
Context("Scheduler is down", func() {
	JustBeforeEach(func() {
		// Stop the scheduler after the BeforeEach provisioning so the
		// service instance/binding setup itself still succeeds.
		stopScheduler()
	})
	BeforeEach(func() {
		provisionAndBind(serviceInstanceId, orgId, spaceId, bindingId, appId, nil, components.Ports[ServiceBroker], httpClientForPublicApi)
	})
	AfterEach(func() {
		unbindAndDeprovision(bindingId, appId, serviceInstanceId, components.Ports[ServiceBroker], httpClientForPublicApi)
		// Restart the scheduler so later specs see a healthy system.
		startScheduler()
	})
	Context("Create policy", func() {
		Context("internal api", func() {
			It("should not create policy", func() {
				policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
				doAttachPolicy(appId, policyStr, http.StatusInternalServerError, components.Ports[APIServer], httpClient)
				checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIServer], httpClient)
			})
		})
		Context("public api", func() {
			It("should not create policy", func() {
				policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
				doAttachPolicy(appId, policyStr, http.StatusInternalServerError, components.Ports[APIPublicServer], httpClientForPublicApi)
				checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIPublicServer], httpClientForPublicApi)
			})
		})
	})
	Context("Delete policy", func() {
		Context("internal api", func() {
			BeforeEach(func() {
				//attach a policy first with 4 recurring and 2 specific_date schedules
				// (attached while the scheduler is still up; it is stopped in JustBeforeEach)
				policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
			})
			It("should delete policy in API server", func() {
				doDetachPolicy(appId, http.StatusInternalServerError, "", components.Ports[APIServer], httpClient)
				checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIServer], httpClient)
			})
		})
		Context("public api", func() {
			BeforeEach(func() {
				//attach a policy first with 4 recurring and 2 specific_date schedules
				policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
			})
			It("should delete policy in API server", func() {
				doDetachPolicy(appId, http.StatusInternalServerError, "", components.Ports[APIPublicServer], httpClientForPublicApi)
				checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIPublicServer], httpClientForPublicApi)
			})
		})
	})
})
// Happy-path policy creation through both the internal and public API,
// covering policies with schedules, invalid policies, and schedule-less ones.
Describe("Create policy", func() {
	BeforeEach(func() {
		provisionAndBind(serviceInstanceId, orgId, spaceId, bindingId, appId, nil, components.Ports[ServiceBroker], httpClientForPublicApi)
	})
	AfterEach(func() {
		unbindAndDeprovision(bindingId, appId, serviceInstanceId, components.Ports[ServiceBroker], httpClientForPublicApi)
	})
	Context("internal api", func() {
		Context("Policies with schedules", func() {
			It("creates a policy and associated schedules", func() {
				policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithSchedule.json"))
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
				checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIServer], httpClient)
				// fixture contains 4 recurring and 2 specific_date schedules
				assertScheduleContents(appId, http.StatusOK, map[string]int{"recurring_schedule": 4, "specific_date": 2})
			})
			It("fails with an invalid policy", func() {
				policyStr = readPolicyFromFile("fakeInvalidPolicy.json")
				doAttachPolicy(appId, policyStr, http.StatusBadRequest, components.Ports[APIServer], httpClient)
				// neither the policy nor any schedules may have been stored
				checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIServer], httpClient)
				checkSchedulerStatus(appId, http.StatusNotFound)
			})
		})
		Context("Policies without schedules", func() {
			It("creates only the policy", func() {
				policyStr = readPolicyFromFile("fakePolicyWithoutSchedule.json")
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
				checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIServer], httpClient)
				checkSchedulerStatus(appId, http.StatusNotFound)
			})
		})
	})
	Context("public api", func() {
		Context("Policies with schedules", func() {
			It("creates a policy and associated schedules", func() {
				policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithSchedule.json"))
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
				checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIPublicServer], httpClientForPublicApi)
				assertScheduleContents(appId, http.StatusOK, map[string]int{"recurring_schedule": 4, "specific_date": 2})
			})
			It("fails with an invalid policy", func() {
				policyStr = readPolicyFromFile("fakeInvalidPolicy.json")
				doAttachPolicy(appId, policyStr, http.StatusBadRequest, components.Ports[APIPublicServer], httpClientForPublicApi)
				checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIPublicServer], httpClientForPublicApi)
				checkSchedulerStatus(appId, http.StatusNotFound)
			})
		})
		Context("Policies without schedules", func() {
			It("creates only the policy", func() {
				policyStr = readPolicyFromFile("fakePolicyWithoutSchedule.json")
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
				checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIPublicServer], httpClientForPublicApi)
				checkSchedulerStatus(appId, http.StatusNotFound)
			})
		})
	})
})
// Updating an existing policy replaces both the policy document and its
// schedules (4 recurring + 2 specific_date become 3 + 1).
Describe("Update policy", func() {
	BeforeEach(func() {
		provisionAndBind(serviceInstanceId, orgId, spaceId, bindingId, appId, nil, components.Ports[ServiceBroker], httpClientForPublicApi)
	})
	AfterEach(func() {
		unbindAndDeprovision(bindingId, appId, serviceInstanceId, components.Ports[ServiceBroker], httpClientForPublicApi)
	})
	Context("internal api", func() {
		Context("Update policies with schedules", func() {
			BeforeEach(func() {
				//attach a policy first with 4 recurring and 2 specific_date schedules
				policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithSchedule.json"))
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
			})
			It("updates the policy and schedules", func() {
				//attach another policy with 3 recurring and 1 specific_date schedules
				policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithScheduleAnother.json"))
				// a second attach for the same app is an update: 200, not 201
				doAttachPolicy(appId, policyStr, http.StatusOK, components.Ports[APIServer], httpClient)
				checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIServer], httpClient)
				assertScheduleContents(appId, http.StatusOK, map[string]int{"recurring_schedule": 3, "specific_date": 1})
			})
		})
	})
	Context("public api", func() {
		Context("Update policies with schedules", func() {
			BeforeEach(func() {
				//attach a policy first with 4 recurring and 2 specific_date schedules
				policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithSchedule.json"))
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
			})
			It("updates the policy and schedules", func() {
				//attach another policy with 3 recurring and 1 specific_date schedules
				policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithScheduleAnother.json"))
				doAttachPolicy(appId, policyStr, http.StatusOK, components.Ports[APIPublicServer], httpClientForPublicApi)
				checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIPublicServer], httpClientForPublicApi)
				assertScheduleContents(appId, http.StatusOK, map[string]int{"recurring_schedule": 3, "specific_date": 1})
			})
		})
	})
})
// Deleting a policy: 404 with a JSON error body when no policy is bound,
// otherwise the policy and its schedules are removed together.
Describe("Delete Policies", func() {
	BeforeEach(func() {
		provisionAndBind(serviceInstanceId, orgId, spaceId, bindingId, appId, nil, components.Ports[ServiceBroker], httpClientForPublicApi)
	})
	AfterEach(func() {
		unbindAndDeprovision(bindingId, appId, serviceInstanceId, components.Ports[ServiceBroker], httpClientForPublicApi)
	})
	Context("internal api", func() {
		Context("for a non-existing app", func() {
			It("Should return a NOT FOUND (404)", func() {
				doDetachPolicy(appId, http.StatusNotFound, `{"error":"No policy bound with application"}`, components.Ports[APIServer], httpClient)
			})
		})
		Context("with an existing app", func() {
			BeforeEach(func() {
				//attach a policy first with 4 recurring and 2 specific_date schedules
				policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
			})
			It("deletes the policy and schedules", func() {
				doDetachPolicy(appId, http.StatusOK, "", components.Ports[APIServer], httpClient)
				checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIServer], httpClient)
				checkSchedulerStatus(appId, http.StatusNotFound)
			})
		})
	})
	Context("public api", func() {
		Context("for a non-existing app", func() {
			It("Should return a NOT FOUND (404)", func() {
				doDetachPolicy(appId, http.StatusNotFound, `{"error":"No policy bound with application"}`, components.Ports[APIPublicServer], httpClientForPublicApi)
			})
		})
		Context("with an existing app", func() {
			BeforeEach(func() {
				//attach a policy first with 4 recurring and 2 specific_date schedules
				policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
				doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
			})
			It("deletes the policy and schedules", func() {
				doDetachPolicy(appId, http.StatusOK, "", components.Ports[APIPublicServer], httpClientForPublicApi)
				checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIPublicServer], httpClientForPublicApi)
				checkSchedulerStatus(appId, http.StatusNotFound)
			})
		})
	})
})
})
// Same create/update/delete suites, but with the API server configured in
// "built-in" mode (the boolean flag passed to PrepareApiServerConfig is
// false here) instead of being brokered; no service instance or binding is
// provisioned, and any leftover policy is detached before each spec.
Describe("When offered as a built-in experience", func() {
	BeforeEach(func() {
		apiServerConfPath = components.PrepareApiServerConfig(components.Ports[APIServer], components.Ports[APIPublicServer], false, 200, fakeCCNOAAUAA.URL(), dbUrl, fmt.Sprintf("https://127.0.0.1:%d", components.Ports[Scheduler]), fmt.Sprintf("https://127.0.0.1:%d", components.Ports[ScalingEngine]), fmt.Sprintf("https://127.0.0.1:%d", components.Ports[MetricsCollector]), fmt.Sprintf("https://127.0.0.1:%d", components.Ports[EventGenerator]), fmt.Sprintf("https://127.0.0.1:%d", components.Ports[ServiceBrokerInternal]), false, defaultHttpClientTimeout, 30, 30, tmpDir)
		startApiServer()
		// Start from a clean slate: a leftover policy from a previous spec
		// would turn "create" into "update". The response status is not
		// asserted because there may legitimately be nothing to detach.
		resp, err := detachPolicy(appId, components.Ports[APIServer], httpClient)
		Expect(err).NotTo(HaveOccurred())
		resp.Body.Close()
	})
	AfterEach(func() {
		stopApiServer()
	})
	Describe("Create policy", func() {
		Context("internal api", func() {
			Context("Policies with schedules", func() {
				It("creates a policy and associated schedules", func() {
					policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithSchedule.json"))
					doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
					checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIServer], httpClient)
					assertScheduleContents(appId, http.StatusOK, map[string]int{"recurring_schedule": 4, "specific_date": 2})
				})
				It("fails with an invalid policy", func() {
					policyStr = readPolicyFromFile("fakeInvalidPolicy.json")
					doAttachPolicy(appId, policyStr, http.StatusBadRequest, components.Ports[APIServer], httpClient)
					checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIServer], httpClient)
					checkSchedulerStatus(appId, http.StatusNotFound)
				})
			})
			Context("Policies without schedules", func() {
				It("creates only the policy", func() {
					policyStr = readPolicyFromFile("fakePolicyWithoutSchedule.json")
					doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
					checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIServer], httpClient)
					checkSchedulerStatus(appId, http.StatusNotFound)
				})
			})
		})
		Context("public api", func() {
			Context("Policies with schedules", func() {
				It("creates a policy and associated schedules", func() {
					policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithSchedule.json"))
					doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
					checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIPublicServer], httpClientForPublicApi)
					assertScheduleContents(appId, http.StatusOK, map[string]int{"recurring_schedule": 4, "specific_date": 2})
				})
				It("fails with an invalid policy", func() {
					policyStr = readPolicyFromFile("fakeInvalidPolicy.json")
					doAttachPolicy(appId, policyStr, http.StatusBadRequest, components.Ports[APIPublicServer], httpClientForPublicApi)
					checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIPublicServer], httpClientForPublicApi)
					checkSchedulerStatus(appId, http.StatusNotFound)
				})
			})
			Context("Policies without schedules", func() {
				It("creates only the policy", func() {
					policyStr = readPolicyFromFile("fakePolicyWithoutSchedule.json")
					doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
					checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIPublicServer], httpClientForPublicApi)
					checkSchedulerStatus(appId, http.StatusNotFound)
				})
			})
		})
	})
	Describe("Update policy", func() {
		Context("internal api", func() {
			Context("Update policies with schedules", func() {
				BeforeEach(func() {
					//attach a policy first with 4 recurring and 2 specific_date schedules
					policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithSchedule.json"))
					doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
				})
				It("updates the policy and schedules", func() {
					//attach another policy with 3 recurring and 1 specific_date schedules
					policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithScheduleAnother.json"))
					doAttachPolicy(appId, policyStr, http.StatusOK, components.Ports[APIServer], httpClient)
					checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIServer], httpClient)
					assertScheduleContents(appId, http.StatusOK, map[string]int{"recurring_schedule": 3, "specific_date": 1})
				})
			})
		})
		Context("public api", func() {
			Context("Update policies with schedules", func() {
				BeforeEach(func() {
					//attach a policy first with 4 recurring and 2 specific_date schedules
					policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithSchedule.json"))
					doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
				})
				It("updates the policy and schedules", func() {
					//attach another policy with 3 recurring and 1 specific_date schedules
					policyStr = setPolicyRecurringDate(readPolicyFromFile("fakePolicyWithScheduleAnother.json"))
					doAttachPolicy(appId, policyStr, http.StatusOK, components.Ports[APIPublicServer], httpClientForPublicApi)
					checkApiServerContent(appId, policyStr, http.StatusOK, components.Ports[APIPublicServer], httpClientForPublicApi)
					assertScheduleContents(appId, http.StatusOK, map[string]int{"recurring_schedule": 3, "specific_date": 1})
				})
			})
		})
	})
	Describe("Delete Policies", func() {
		Context("internal api", func() {
			Context("for a non-existing app", func() {
				It("Should return a NOT FOUND (404)", func() {
					doDetachPolicy(appId, http.StatusNotFound, `{"error":"No policy bound with application"}`, components.Ports[APIServer], httpClient)
				})
			})
			Context("with an existing app", func() {
				BeforeEach(func() {
					//attach a policy first with 4 recurring and 2 specific_date schedules
					policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
					doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIServer], httpClient)
				})
				It("deletes the policy and schedules", func() {
					doDetachPolicy(appId, http.StatusOK, "", components.Ports[APIServer], httpClient)
					checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIServer], httpClient)
					checkSchedulerStatus(appId, http.StatusNotFound)
				})
			})
		})
		Context("public api", func() {
			Context("for a non-existing app", func() {
				It("Should return a NOT FOUND (404)", func() {
					doDetachPolicy(appId, http.StatusNotFound, `{"error":"No policy bound with application"}`, components.Ports[APIPublicServer], httpClientForPublicApi)
				})
			})
			Context("with an existing app", func() {
				BeforeEach(func() {
					//attach a policy first with 4 recurring and 2 specific_date schedules
					policyStr = readPolicyFromFile("fakePolicyWithSchedule.json")
					doAttachPolicy(appId, policyStr, http.StatusCreated, components.Ports[APIPublicServer], httpClientForPublicApi)
				})
				It("deletes the policy and schedules", func() {
					doDetachPolicy(appId, http.StatusOK, "", components.Ports[APIPublicServer], httpClientForPublicApi)
					checkApiServerStatus(appId, http.StatusNotFound, components.Ports[APIPublicServer], httpClientForPublicApi)
					checkSchedulerStatus(appId, http.StatusNotFound)
				})
			})
		})
	})
})
})
|
rvernagus/irdb | spec/oracle/spec_helper.rb | dir_path = File.dirname(__FILE__)
# Make the project's lib directory loadable without installing the gem.
lib_path = File.expand_path(dir_path + "/../../lib")
$:.unshift lib_path unless $:.include? lib_path

require "irdb"
require "yaml"

# Pull IRDb's constants (Database, DbProviderFactory, ...) into the top level
# so specs can use them unqualified.
include IRDb

# Oracle connection settings and setup SQL shared by all specs in this suite.
$config = YAML.load_file(dir_path + "/config.yaml")
# Shared helper mixed into the Oracle specs for obtaining a ready database.
module OracleHelper
  # Builds a Database using the provider and connection string from $config,
  # then runs every configured setup statement against it. Failures from the
  # setup SQL are swallowed on purpose (e.g. dropping objects that do not
  # exist yet). Returns the connected Database.
  def get_database
    factory = DbProviderFactory.new
    database = Database.new(factory.create_provider($config[:provider]), $config[:cstr])
    $config[:setup].each do |statement|
      begin
        database.execute_non_query(statement)
      rescue
        nil
      end
    end
    database
  end
end
|
NextCenturyCorporation/nucleus-data-server | server/src/test/java/com/ncc/neon/services/StateServiceTest.java | package com.ncc.neon.services;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.junit.Assert.assertEquals;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.ncc.neon.models.PagedList;
import com.ncc.neon.services.StateService.StateServiceFailureException;
import com.ncc.neon.services.StateService.StateServiceMissingFileException;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = StateService.class)
/**
 * Tests for {@link StateService}: listing, loading, saving, and deleting
 * dashboard state files in JSON and YAML formats. Relies on fixture files
 * under src/test/resources/states (one per JSON_*/YAML_* constant below)
 * and src/test/resources/states-ordered — TODO confirm fixtures exist
 * before running outside the standard build.
 */
public class StateServiceTest {
    // Expected contents of the fixture files, parsed once in setup().
    private static Map DATA_NEON;
    private static Map DATA_NONE;
    private static Map DATA_NULL;
    private static Map DATA_TEST;

    // Base names (no extension) of the JSON-format fixture states.
    private static String JSON_NEON_CONFIG = "jsonNeonConfig";
    private static String JSON_NORMAL_EXTENSION = "jsonNormalExtension";
    private static String JSON_CAPITALIZED_EXTENSION = "jsonCapitalizedExtension";
    private static String JSON_TEXT_EXTENSION = "jsonTextExtension";
    private static String JSON_CAPITALIZED_TEXT_EXTENSION = "jsonCapitalizedTextExtension";
    private static String JSON_NO_EXTENSION = "jsonNoExtension";
    private static String JSON_YAML_EXTENSION = "jsonYamlExtension";
    private static String JSON_EMPTY_OBJECT = "jsonEmptyObject";

    // Base names (no extension) of the YAML-format fixture states.
    private static String YAML_NEON_CONFIG = "yamlNeonConfig";
    private static String YAML_NORMAL_EXTENSION = "yamlNormalExtension";
    private static String YAML_CAPITALIZED_EXTENSION = "yamlCapitalizedExtension";
    private static String YAML_ABBREVIATED_EXTENSION = "yamlAbbreviatedExtension";
    private static String YAML_CAPITALIZED_ABBREVIATED_EXTENSION = "yamlCapitalizedAbbreviatedExtension";
    private static String YAML_TEXT_EXTENSION = "yamlTextExtension";
    private static String YAML_CAPITALIZED_TEXT_EXTENSION = "yamlCapitalizedTextExtension";
    private static String YAML_NO_EXTENSION = "yamlNoExtension";
    private static String YAML_JSON_EXTENSION = "yamlJsonExtension";
    private static String YAML_EMPTY = "yamlEmpty";

    private static ObjectMapper JSON_MAPPER = new ObjectMapper();
    private static ObjectMapper YAML_MAPPER = new ObjectMapper(new YAMLFactory());

    private static String RESOURCE_DIRECTORY = "src/test/resources";
    private static String STATE_DIRECTORY = RESOURCE_DIRECTORY + "/states";

    // Service over the fixture directory above.
    private static StateService STATE_SERVICE;

    // Fresh temp directory with no states, for the "no previous states" cases.
    private static Path EMPTY_STATE_DIRECTORY;
    private static StateService EMPTY_STATE_SERVICE;

    /**
     * Creates both services and parses the expected data maps.
     * Runs once for the whole class.
     */
    @BeforeClass
    public static void setup() {
        try {
            EMPTY_STATE_DIRECTORY = Files.createTempDirectory("no_states_");
            EMPTY_STATE_SERVICE = new StateService(EMPTY_STATE_DIRECTORY.toString());
        }
        catch (IOException e) {
            fail(e.toString());
        }
        STATE_SERVICE = new StateService(STATE_DIRECTORY);
        try {
            DATA_NULL = JSON_MAPPER.readValue("null", LinkedHashMap.class);
            DATA_NONE = JSON_MAPPER.readValue("{}", LinkedHashMap.class);
            DATA_NEON = JSON_MAPPER.readValue("{ \"dashboards\": { \"name\": \"dashboard1\" }," +
                "\"datastores\": { \"datastore1\": {} }, \"layouts\": { \"layout1\": [] }, \"options\": {} }",
                LinkedHashMap.class);
            DATA_TEST = JSON_MAPPER.readValue(
                "{ \"list\": [\"a\", \"b\", \"c\"], \"number\": 123, \"object\": { \"key\": \"value\" }, \"string\": \"test\" }",
                LinkedHashMap.class);
        }
        catch (IOException e) {
            fail(e.toString());
        }
    }

    /** Deleting a state by name removes its .yaml file from the directory. */
    @Test
    public void deleteStateTest() {
        try {
            File testFile = new File(STATE_DIRECTORY + "/testStateName.yaml");
            testFile.createNewFile();
            assertThat(testFile.exists()).isEqualTo(true);
            STATE_SERVICE.deleteState("testStateName");
            assertThat(testFile.exists()).isEqualTo(false);
        }
        catch (IOException | StateServiceFailureException | StateServiceMissingFileException e) {
            fail(e.toString());
        }
    }

    /**
     * Delete sanitizes the requested name (path separators and special
     * characters stripped) before resolving the file.
     */
    @Test
    public void deleteStateWithInvalidNameTest() {
        try {
            File testFile = new File(STATE_DIRECTORY + "/folder.my-test.state_name1234.yaml");
            testFile.createNewFile();
            assertThat(testFile.exists()).isEqualTo(true);
            STATE_SERVICE.deleteState("../folder/my-test.!@#$%^&*state_name~`?\\1234");
            assertThat(testFile.exists()).isEqualTo(false);
        }
        catch (IOException | StateServiceFailureException | StateServiceMissingFileException e) {
            fail(e.toString());
        }
    }

    /** listStates reports every fixture file, whatever its extension. */
    @Test
    public void findStateNamesTest() {
        // Collect the listed fileName values with their extensions stripped.
        HashSet<String> actual = new HashSet<String>(
            Arrays.asList(
                Arrays.asList(
                    STATE_SERVICE.listStates(100, 0).getResults()
                )
                .stream()
                .map(x -> ((String) x.get("fileName")).replaceFirst("[.][^.]*$", ""))
                .toArray(n -> new String[n])
            )
        );
        assertThat(actual.contains(JSON_NEON_CONFIG)).isEqualTo(true);
        assertThat(actual.contains(JSON_NORMAL_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(JSON_CAPITALIZED_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(JSON_TEXT_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(JSON_CAPITALIZED_TEXT_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(JSON_NO_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(JSON_YAML_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(JSON_EMPTY_OBJECT)).isEqualTo(true);
        assertThat(actual.contains(YAML_NEON_CONFIG)).isEqualTo(true);
        assertThat(actual.contains(YAML_NORMAL_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(YAML_CAPITALIZED_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(YAML_ABBREVIATED_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(YAML_CAPITALIZED_ABBREVIATED_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(YAML_TEXT_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(YAML_CAPITALIZED_TEXT_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(YAML_NO_EXTENSION)).isEqualTo(true);
        assertThat(actual.contains(YAML_JSON_EXTENSION)).isEqualTo(true);
        // Intentionally not asserted; empty YAML file may be excluded from listing.
        // assertThat(actual.contains(YAML_EMPTY)).isEqualTo(true);
    }

    /** An empty directory lists no states. */
    @Test
    public void findStateNamesWithNoPreviousStatesTest() {
        assertThat(EMPTY_STATE_SERVICE.listStates(0, 0).getResults()).isEqualTo(new Map[0]);
    }

    /**
     * With 19 fixture files and a page size of 5, four full-or-partial
     * pages are returned and a fifth page is empty.
     */
    @Test
    public void pagingThroughListOfStates() {
        StateService service = new StateService(RESOURCE_DIRECTORY + "/states-ordered");
        PagedList list1 = service.listStates(5, 0);
        assertEquals(list1.getTotal(), 19);
        PagedList list2 = service.listStates(5, 5);
        PagedList list3 = service.listStates(5, 10);
        PagedList list4 = service.listStates(5, 15);
        PagedList list5 = service.listStates(5, 20);
        assertEquals(list1.getResults().length, 5);
        assertEquals(list2.getResults().length, 5);
        assertEquals(list3.getResults().length, 5);
        assertEquals(list4.getResults().length, 4);
        assertEquals(list5.getResults().length, 0);
    }

    /** JSON-content states load regardless of file extension (or none). */
    @Test
    public void loadStateWithJsonFormatTest() {
        try {
            assertThat(STATE_SERVICE.loadState(JSON_NEON_CONFIG, false)).isEqualTo(DATA_NEON);
            assertThat(STATE_SERVICE.loadState(JSON_NORMAL_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(JSON_CAPITALIZED_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(JSON_TEXT_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(JSON_CAPITALIZED_TEXT_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(JSON_NO_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(JSON_YAML_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(JSON_EMPTY_OBJECT, false)).isEqualTo(DATA_NONE);
        }
        catch (StateServiceFailureException | StateServiceMissingFileException e) {
            fail(e.toString());
        }
    }

    /** YAML-content states load regardless of file extension (or none). */
    @Test
    public void loadStateWithYamlFormatTest() {
        try {
            assertThat(STATE_SERVICE.loadState(YAML_NEON_CONFIG, false)).isEqualTo(DATA_NEON);
            assertThat(STATE_SERVICE.loadState(YAML_NORMAL_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(YAML_CAPITALIZED_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(YAML_ABBREVIATED_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(YAML_CAPITALIZED_ABBREVIATED_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(YAML_TEXT_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(YAML_CAPITALIZED_TEXT_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(YAML_NO_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(YAML_JSON_EXTENSION, false)).isEqualTo(DATA_TEST);
            assertThat(STATE_SERVICE.loadState(YAML_EMPTY, false)).isEqualTo(DATA_NULL);
        }
        catch (StateServiceFailureException | StateServiceMissingFileException e) {
            fail(e.toString());
        }
    }

    /** Saving writes a .yaml file whose parsed content round-trips the map. */
    @Test
    public void saveStateTest() {
        try {
            Map testStateData = JSON_MAPPER.readValue("{ \"a\": \"test\", \"b\": 1234, \"c\": [], \"d\": {} }", LinkedHashMap.class);
            STATE_SERVICE.saveState("testStateName", testStateData);
            File testFile = new File(STATE_DIRECTORY + "/testStateName.yaml");
            assertThat(testFile.exists()).isEqualTo(true);
            Map actual = YAML_MAPPER.readValue(testFile, LinkedHashMap.class);
            assertThat(actual).isEqualTo(testStateData);
            testFile.delete();
            assertThat(testFile.exists()).isEqualTo(false);
        }
        catch (IOException | StateServiceFailureException e) {
            fail(e.toString());
        }
    }

    /** Save sanitizes the requested name the same way delete does. */
    @Test
    public void saveStateWithInvalidNameTest() {
        try {
            Map testStateData = JSON_MAPPER.readValue("{ \"a\": \"test\", \"b\": 1234, \"c\": [], \"d\": {} }", LinkedHashMap.class);
            STATE_SERVICE.saveState("../folder/my-test.!@#$%^&*state_name~`?\\1234", testStateData);
            File testFile = new File(STATE_DIRECTORY + "/folder.my-test.state_name1234.yaml");
            assertThat(testFile.exists()).isEqualTo(true);
            Map actual = YAML_MAPPER.readValue(testFile, LinkedHashMap.class);
            assertThat(actual).isEqualTo(testStateData);
            testFile.delete();
            assertThat(testFile.exists()).isEqualTo(false);
        }
        catch (IOException | StateServiceFailureException e) {
            fail(e.toString());
        }
    }

    /** Saving into an empty directory works without any pre-existing states. */
    @Test
    public void saveStateWithNoPreviousStatesTest() {
        try {
            Map testStateData = JSON_MAPPER.readValue("{ \"a\": \"test\", \"b\": 1234, \"c\": [], \"d\": {} }", LinkedHashMap.class);
            EMPTY_STATE_SERVICE.saveState("testStateName", testStateData);
            File testFile = new File(EMPTY_STATE_DIRECTORY.toFile(), "testStateName.yaml");
            assertThat(testFile.exists()).isEqualTo(true);
            Map actual = YAML_MAPPER.readValue(testFile, LinkedHashMap.class);
            assertThat(actual).isEqualTo(testStateData);
            testFile.delete();
            assertThat(testFile.exists()).isEqualTo(false);
        }
        catch (IOException | StateServiceFailureException e) {
            fail(e.toString());
        }
    }

    /** Saving under an existing name replaces the previous content entirely. */
    @Test
    public void saveStateToOverwritePreviousStateTest() {
        try {
            Map prevStateData = JSON_MAPPER.readValue("{ \"b\": [1234], \"c\": { \"d\": 5678 }, \"e\": \"prev\" }", LinkedHashMap.class);
            File testFile = new File(STATE_DIRECTORY + "/testStateName.yaml");
            JSON_MAPPER.writeValue(testFile, prevStateData);
            Map testStateData = JSON_MAPPER.readValue("{ \"a\": \"test\", \"b\": 1234, \"c\": [], \"d\": {} }", LinkedHashMap.class);
            STATE_SERVICE.saveState("testStateName", testStateData);
            assertThat(testFile.exists()).isEqualTo(true);
            Map actual = YAML_MAPPER.readValue(testFile, LinkedHashMap.class);
            assertThat(actual).isEqualTo(testStateData);
            testFile.delete();
            assertThat(testFile.exists()).isEqualTo(false);
        }
        catch (IOException | StateServiceFailureException e) {
            fail(e.toString());
        }
    }
}
|
dbflute/dbflute-core | dbflute-engine/src/main/java/org/dbflute/logic/replaceschema/finalinfo/DfAbstractSchemaTaskFinalInfo.java | /*
* Copyright 2014-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.logic.replaceschema.finalinfo;
import java.util.ArrayList;
import java.util.List;
import org.dbflute.util.Srl;
/**
 * Base holder for the final result of a (Replace)Schema task:
 * a one-line result message, detail lines, a failure flag, and
 * optional process performance in milliseconds.
 * @author jflute
 */
public class DfAbstractSchemaTaskFinalInfo {

    // One-line summary of the task result; null/empty until set.
    protected String _resultMessage;

    // Detail lines accumulated via addDetailMessage(); never null.
    protected final List<String> _detailMessageList = new ArrayList<String>();

    // true when the task ended in failure.
    protected boolean _failure;

    protected Long _processPerformanceMillis; // null allowed (almost exists after execution however not required)

    /**
     * Does this info have a meaningful result message?
     * @return true if the result message is non-null and not trimmed-empty.
     */
    public boolean isValidInfo() {
        return Srl.is_NotNull_and_NotTrimmedEmpty(_resultMessage);
    }

    public String getResultMessage() {
        return _resultMessage;
    }

    public void setResultMessage(String resultMessage) {
        _resultMessage = resultMessage;
    }

    /**
     * @return the internal (mutable) detail message list; callers should
     * prefer addDetailMessage() for additions.
     */
    public List<String> getDetailMessageList() {
        return _detailMessageList;
    }

    public void addDetailMessage(String detailMessage) {
        _detailMessageList.add(detailMessage);
    }

    public boolean isFailure() {
        return _failure;
    }

    public void setFailure(boolean failure) {
        _failure = failure;
    }

    public Long getProcessPerformanceMillis() {
        return _processPerformanceMillis;
    }

    public void setProcessPerformanceMillis(Long processPerformanceMillis) {
        _processPerformanceMillis = processPerformanceMillis;
    }
}
|
r-kaminski/innovativeproject-inventory-of-supplies | mobile/screens/StocktakingsContainer.js | <gh_stars>1-10
import React from 'react';
import { FlatList, StyleSheet, Text } from 'react-native';
import { ListItem } from "react-native-elements";
import { getStocktakings } from "../services/StocktakingService";
export default class StocktakingsScreen extends React.Component {
state = {
pageSize: 8,
"count": 0,
refreshing: false,
results: [],
page: 1
};
componentDidMount() {
this._onRefresh(1)
}
_onRefresh = (page) => {
this.setState({ refreshing: true, page: page ? page : 1 });
this.fetchData(page ? page : 1).then(() => {
this.setState({ refreshing: false });
});
}
async fetchData(page) {
await getStocktakings({
page: page,
page_size: this.state.pageSize,
name: this.props.search
}).then((res) => {
{
page === 1 ?
this.setState({ ...this.state, ...res, page: 1 })
: this.setState({ ...this.state, ...res, results: [...this.state.results, ...res.results] })
}
})
}
render() {
return (
<FlatList style={styles.container}
data={this.state.results}
keyExtractor={item => item.id.toString()}
renderItem={({ item, index }) => <ListItem
style={styles.listItem}
key={index}
title={item.name}
subtitle={
<Text style={styles.subtitle}
ellipsizeMode={'tail'}
numberOfLines={1}
>{item.date}</Text>
}
onPress={() => this._handlePressStocktaking(item.id)}
/>}
refreshing={this.state.refreshing}
onRefresh={() => this._onRefresh(1)}
/>
);
}
_handlePressStocktaking = (id) => {
const { navigate } = this.props.nav;
navigate('StocktakingScreen', { id: id })
};
}
const styles = StyleSheet.create({
    // Full-screen white background for the FlatList.
    container: {
        flex: 1,
        backgroundColor: '#fff',
    },
    // NOTE(review): not referenced anywhere in this file — confirm it is
    // used elsewhere before removing.
    contentContainer: {
        paddingTop: 30,
    },
    // Thin divider under each list row.
    listItem: {
        borderBottomWidth: 1,
        borderColor: '#d0d0d0'
    },
    // Muted grey used for the date subtitle.
    subtitle: {
        color: '#d0d0d0',
    },
});
|
jgrancell/kubenv | settings/configuration_test.go | package settings
import (
"testing"
)
// TestLoadConfiguration verifies that LoadConfiguration with no overrides
// produces a usable default configuration.
func TestLoadConfiguration(t *testing.T) {
	// A failure here makes every later check meaningless (and would
	// dereference an unusable config), so stop the test immediately.
	// Using a format string also satisfies go vet's printf check, which
	// the previous t.Errorf(err.Error()) violated.
	defaultConf, err := LoadConfiguration()
	if err != nil {
		t.Fatalf("LoadConfiguration() returned error: %v", err)
	}

	// By default the kube directory doubles as the environment directory.
	if defaultConf.KubeDir != defaultConf.EnvDir {
		t.Errorf("Expected KubeDir and EnvDir to be equal, got %s %s", defaultConf.KubeDir, defaultConf.EnvDir)
	}

	// Testing with custom parameters
	// os.Setenv("KUBENV_CONFIG", "testsuite/.kubenv.json")
	// conf, err := LoadConfiguration()
	// if err != nil {
	// 	t.Fatalf("LoadConfiguration() returned error: %v", err)
	// }

	if defaultConf.KubeDir == "" {
		t.Errorf("KubeDir should not be empty. Got %q", defaultConf.KubeDir)
	}
	if defaultConf.EnvDir == "" {
		t.Errorf("EnvDir should not be empty. Got %q", defaultConf.EnvDir)
	}
	if defaultConf.KubeConf == "" {
		t.Errorf("KubeConf should not be empty. Got %q", defaultConf.KubeConf)
	}
}
|
marc2k3/foo_jscript_panel | src/Panel/PanelManager.h | #pragma once
class PanelTimer;

// Process-wide registry of panel windows plus the machinery for
// script-created timers. Accessed as a singleton via instance().
class PanelManager
{
public:
	PanelManager();
	~PanelManager();

	static PanelManager& instance();

	// Creates a timer firing after `delay` ms (repeating unless
	// `execute_once`) whose callback invokes `pdisp`; returns its id.
	uint32_t create_timer(CWindow hwnd, IDispatch* pdisp, uint32_t delay, bool execute_once);

	// Registers/unregisters a panel window for broadcast messaging.
	void add_window(CWindow hwnd);
	// Posts the callback message to every registered window.
	void post_msg_to_all(CallbackID id, WPARAM wp = 0);
	// As above with a refcounted payload; `except` is skipped if given.
	void post_msg_to_all_pointer(CallbackID id, pfc::refcounted_object_root* param, HWND except = nullptr);
	void remove_timer(HANDLE timer_handle, uint32_t timer_id);
	void remove_window(CWindow hwnd);
	// Stops one timer / all timers belonging to a window.
	void stop_timer(CWindow hwnd, uint32_t timer_id);
	void stop_timers(CWindow hwnd);
	// Dispatches the callback for the timer with the given id.
	void timer_invoke(uint32_t timer_id);
	void unload_all();

private:
	using TimerMap = std::map<uint32_t, std::unique_ptr<PanelTimer>>;

	// NOTE(review): presumably a Win32 timer-queue HANDLE shared by all
	// PanelTimer instances — confirm against the .cpp.
	HANDLE m_timer_queue;
	TimerMap m_timer_map;
	// Guards the containers below against concurrent timer callbacks.
	std::mutex m_mutex;
	std::set<CWindow> m_hwnds;
	// Monotonically increasing source of timer ids.
	uint32_t m_cur_timer_id = 0;

	PFC_CLASS_NOT_COPYABLE_EX(PanelManager)
};
|
shaojiankui/iOS10-Runtime-Headers | PrivateFrameworks/PrototypeTools.framework/PTSButtonCell.h | /* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/PrototypeTools.framework/PrototypeTools
*/
// UITableViewCell subclass exposed by PrototypeTools; declaration was
// recovered via RuntimeBrowser (see header comment), so parameter names
// and types are the tool's reconstruction, not original source.
@interface PTSButtonCell : UITableViewCell

- (id)initWithReuseIdentifier:(id)arg1;

@end
|
smargh/homebrew-fonts | Casks/font-kisiska.rb | cask :v1 => 'font-kisiska' do
  # Upstream publishes no versioned releases, so the cask tracks "latest"
  # and consequently cannot pin a checksum (sha256 :no_check).
  version :latest
  sha256 :no_check

  url 'http://www.languagegeek.com/font/kisiska.zip'
  homepage 'http://www.languagegeek.com/font/fontdownload.html'
  license :unknown

  font 'kisiska.otf'
end
|
jacksund/simmate | src/simmate/configuration/dask/client.py | <gh_stars>1-10
# -*- coding: utf-8 -*-
from dask.distributed import Client, get_client
def get_dask_client(**kwargs) -> Client:
    """
    This is a convenience utility that grabs the client for the local Dask cluster
    if it exists -- and if not, creates a new cluster and returns the client for
    it.

    #### Parameters

    - `**kwargs`:
        Any arguments normally accepted by dask.distributed.Client. The exception
        to this is the `preload` kwarg, which is not allowed.

    #### Raises

    - `TypeError`:
        If the reserved `preload` kwarg is passed.
    """
    # Enforce the documented contract up front: without this guard, a caller
    # passing `preload` would hit an opaque "multiple values for keyword
    # argument 'preload'" TypeError deep inside Client() instead.
    if "preload" in kwargs:
        raise TypeError(
            "The 'preload' kwarg is reserved by get_dask_client and cannot "
            "be overridden."
        )

    # First, try accessing a global client.
    try:
        client = get_client()

    # If the line above fails, it's because no global client exists yet. In that
    # case, we make a new cluster and return the client for it.
    except ValueError:
        # This preload script connects each worker to the Simmate database
        client = Client(
            preload="simmate.configuration.dask.connect_to_database",
            **kwargs,
        )

        # TODO: To all default job-queue clusters, consider using...
        #   from simmate.configuration.dask.setup_cluster import run_cluster
        #   cluster = run_cluster(...)
        #   client = Client(cluster.scheduler.address)

    # OPTIMIZE: I'm not sure if there's a better way to do implement this.
    # If this gives issues, I can alternatively try...
    #   from dask.distributed import client
    #   client._get_global_client()
    # ... based on https://stackoverflow.com/questions/59070260/
    return client
|
democat3457/BetterWithAddons | src/main/java/betterwithaddons/tileentity/TileEntityPress.java | <filename>src/main/java/betterwithaddons/tileentity/TileEntityPress.java
package betterwithaddons.tileentity;
import betterwithmods.api.tile.IMechanicalPower;
import net.minecraft.block.Block;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
// Skeleton IMechanicalPower tile entity: every power method currently
// returns 0 and no state is persisted, i.e. the press accepts/produces no
// mechanical power yet.
public class TileEntityPress extends TileEntityBase implements IMechanicalPower {
    // NOTE(review): never read or written in this file and not saved in
    // writeDataToNBT — presumably reserved for future pressing progress;
    // confirm before relying on it.
    private int progress;

    @Override
    public int getMechanicalOutput(EnumFacing facing) {
        return 0;
    }

    @Override
    public int getMechanicalInput(EnumFacing facing) {
        return 0;
    }

    @Override
    public int getMaximumInput(EnumFacing facing) {
        return 0;
    }

    @Override
    public int getMinimumInput(EnumFacing facing) {
        return 0;
    }

    @Override
    public Block getBlock() {
        return blockType;
    }

    @Override
    public World getBlockWorld() {
        return world;
    }

    @Override
    public BlockPos getBlockPos() {
        return pos;
    }

    // Intentionally empty: no state (including `progress`) is persisted yet.
    @Override
    public void writeDataToNBT(NBTTagCompound compound) {
    }

    @Override
    public void readDataFromNBT(NBTTagCompound compound) {
    }
}
|
Andreas237/AndroidPolicyAutomation | ExtractedJars/Shopkick_com.shopkick.app/javafiles/android/support/v4/app/NotificationCompat$WearableExtender.java | <reponame>Andreas237/AndroidPolicyAutomation<filename>ExtractedJars/Shopkick_com.shopkick.app/javafiles/android/support/v4/app/NotificationCompat$WearableExtender.java
// Decompiled by Jad v1.5.8g. Copyright 2001 <NAME>.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) annotate safe
package android.support.v4.app;
import android.app.Notification;
import android.app.PendingIntent;
import android.graphics.Bitmap;
import android.os.Bundle;
import java.util.*;
// Referenced classes of package android.support.v4.app:
// NotificationCompat, NotificationCompatJellybean, RemoteInput
public static final class NotificationCompat$WearableExtender
implements NotificationCompat.Extender
{
/**
 * Converts a compat action into a framework {@link android.app.Notification.Action},
 * carrying over extras, the allow-generated-replies hint, and remote inputs.
 * (Decompiler cast noise and bytecode comments removed; logic unchanged.)
 */
private static android.app.Notification.Action getActionFromActionCompat(NotificationCompat.Action action)
{
    android.app.Notification.Action.Builder builder = new android.app.Notification.Action.Builder(
            action.getIcon(), action.getTitle(), action.getActionIntent());

    // Copy the compat action's extras (or start empty) and record the
    // allow-generated-replies hint in extras for platforms that read it there.
    Bundle actionExtras;
    if (action.getExtras() != null)
        actionExtras = new Bundle(action.getExtras());
    else
        actionExtras = new Bundle();
    actionExtras.putBoolean("android.support.allowGeneratedReplies", action.getAllowGeneratedReplies());

    // API 24+ also supports the hint natively on the builder.
    if (android.os.Build.VERSION.SDK_INT >= 24)
        builder.setAllowGeneratedReplies(action.getAllowGeneratedReplies());
    builder.addExtras(actionExtras);

    // Attach remote inputs, converting each compat RemoteInput to the
    // framework type.
    RemoteInput[] compatInputs = action.getRemoteInputs();
    if (compatInputs != null)
    {
        android.app.RemoteInput[] frameworkInputs = RemoteInput.fromCompat(compatInputs);
        for (android.app.RemoteInput input : frameworkInputs)
            builder.addRemoteInput(input);
    }
    return builder.build();
}
/**
 * Sets or clears the given bit mask in {@code mFlags}.
 *
 * Bug fix: the decompiled clear path was {@code mFlags = i & mFlags}, which
 * keeps ONLY the masked bit and wipes every other flag. The intended
 * semantics (matching the AndroidX NotificationCompat.WearableExtender
 * source) clear just the masked bit: {@code mFlags &= ~mask}.
 */
private void setFlag(int mask, boolean set)
{
    if (set)
    {
        mFlags |= mask;
    } else
    {
        mFlags &= ~mask;
    }
}
/** Appends one wearable-specific action; returns this for chaining. */
public NotificationCompat$WearableExtender addAction(NotificationCompat.Action action)
{
    mActions.add(action);
    return this;
}

/** Appends every action in the given list; returns this for chaining. */
public NotificationCompat$WearableExtender addActions(List list)
{
    mActions.addAll(list);
    return this;
}

/** Appends one additional notification page; returns this for chaining. */
public NotificationCompat$WearableExtender addPage(Notification notification)
{
    mPages.add(notification);
    return this;
}

/** Appends every notification in the given list as pages. */
public NotificationCompat$WearableExtender addPages(List list)
{
    mPages.addAll(list);
    return this;
}

/** Removes all wearable actions; returns this for chaining. */
public NotificationCompat$WearableExtender clearActions()
{
    mActions.clear();
    return this;
}

/** Removes all additional pages; returns this for chaining. */
public NotificationCompat$WearableExtender clearPages()
{
    mPages.clear();
    return this;
}
/**
 * Returns a copy of this extender. Action and page lists are shallow-copied
 * into fresh ArrayLists; all other fields are copied by value/reference.
 */
public NotificationCompat$WearableExtender clone()
{
    NotificationCompat$WearableExtender copy = new NotificationCompat$WearableExtender();
    copy.mActions = new ArrayList(mActions);
    copy.mFlags = mFlags;
    copy.mDisplayIntent = mDisplayIntent;
    copy.mPages = new ArrayList(mPages);
    copy.mBackground = mBackground;
    copy.mContentIcon = mContentIcon;
    copy.mContentIconGravity = mContentIconGravity;
    copy.mContentActionIndex = mContentActionIndex;
    copy.mCustomSizePreset = mCustomSizePreset;
    copy.mCustomContentHeight = mCustomContentHeight;
    copy.mGravity = mGravity;
    copy.mHintScreenTimeout = mHintScreenTimeout;
    copy.mDismissalId = mDismissalId;
    copy.mBridgeTag = mBridgeTag;
    return copy;
}

// Synthetic covariant-return bridge emitted by the compiler; the decompiler
// renders the ACC_BRIDGE flag as "volatile". Not valid hand-written Java —
// kept verbatim because it mirrors the original bytecode.
public volatile Object clone()
    throws CloneNotSupportedException
{
    return ((Object) (clone()));
}
/**
 * Serializes this extender into the builder's extras under the
 * "android.wearable.EXTENSIONS" bundle. Each field is written only when it
 * differs from its default, so an untouched extender adds a near-empty
 * bundle. (Rewritten from decompiler output; branch structure preserved.)
 */
public NotificationCompat.Builder extend(NotificationCompat.Builder builder)
{
    Bundle wearableBundle = new Bundle();

    // Actions: API 20+ stores framework Action parcelables, API 16-19 a
    // bundle-per-action; below API 16 an explicit null marker is written.
    if (!mActions.isEmpty())
    {
        if (android.os.Build.VERSION.SDK_INT >= 16)
        {
            ArrayList parcelableActions = new ArrayList(mActions.size());
            Iterator it = mActions.iterator();
            while (it.hasNext())
            {
                NotificationCompat.Action compatAction = (NotificationCompat.Action) it.next();
                if (android.os.Build.VERSION.SDK_INT >= 20)
                    parcelableActions.add(getActionFromActionCompat(compatAction));
                else if (android.os.Build.VERSION.SDK_INT >= 16)
                    parcelableActions.add(NotificationCompatJellybean.getBundleForAction(compatAction));
            }
            wearableBundle.putParcelableArrayList("actions", parcelableActions);
        } else
        {
            wearableBundle.putParcelableArrayList("actions", null);
        }
    }

    if (mFlags != 1) // 1 == DEFAULT_FLAGS
        wearableBundle.putInt("flags", mFlags);
    if (mDisplayIntent != null)
        wearableBundle.putParcelable("displayIntent", mDisplayIntent);
    if (!mPages.isEmpty())
        wearableBundle.putParcelableArray("pages",
                (android.os.Parcelable[]) mPages.toArray(new Notification[mPages.size()]));
    if (mBackground != null)
        wearableBundle.putParcelable("background", mBackground);
    if (mContentIcon != 0)
        wearableBundle.putInt("contentIcon", mContentIcon);
    if (mContentIconGravity != 0x800005) // DEFAULT_CONTENT_ICON_GRAVITY
        wearableBundle.putInt("contentIconGravity", mContentIconGravity);
    if (mContentActionIndex != -1)
        wearableBundle.putInt("contentActionIndex", mContentActionIndex);
    if (mCustomSizePreset != 0)
        wearableBundle.putInt("customSizePreset", mCustomSizePreset);
    if (mCustomContentHeight != 0)
        wearableBundle.putInt("customContentHeight", mCustomContentHeight);
    if (mGravity != 80) // DEFAULT_GRAVITY
        wearableBundle.putInt("gravity", mGravity);
    if (mHintScreenTimeout != 0)
        wearableBundle.putInt("hintScreenTimeout", mHintScreenTimeout);
    if (mDismissalId != null)
        wearableBundle.putString("dismissalId", mDismissalId);
    if (mBridgeTag != null)
        wearableBundle.putString("bridgeTag", mBridgeTag);

    builder.getExtras().putBundle("android.wearable.EXTENSIONS", wearableBundle);
    return builder;
}
/** Returns the live (mutable) list of wearable actions. */
public List getActions()
{
    return mActions;
}

/** Returns the background bitmap, or null if unset. */
public Bitmap getBackground()
{
    return mBackground;
}

public String getBridgeTag()
{
    return mBridgeTag;
}

public int getContentAction()
{
    return mContentActionIndex;
}

public int getContentIcon()
{
    return mContentIcon;
}

public int getContentIconGravity()
{
    return mContentIconGravity;
}

// The boolean getters below each test one bit of mFlags; the bit values
// match the FLAG_* constants declared at the bottom of this class.
public boolean getContentIntentAvailableOffline()
{
    return (mFlags & 1) != 0;
}

public int getCustomContentHeight()
{
    return mCustomContentHeight;
}

public int getCustomSizePreset()
{
    return mCustomSizePreset;
}

public String getDismissalId()
{
    return mDismissalId;
}

public PendingIntent getDisplayIntent()
{
    return mDisplayIntent;
}

public int getGravity()
{
    return mGravity;
}

public boolean getHintAmbientBigPicture()
{
    return (mFlags & 0x20) != 0;
}

public boolean getHintAvoidBackgroundClipping()
{
    return (mFlags & 0x10) != 0;
}

public boolean getHintContentIntentLaunchesActivity()
{
    return (mFlags & 0x40) != 0;
}

public boolean getHintHideIcon()
{
    return (mFlags & 2) != 0;
}

public int getHintScreenTimeout()
{
    return mHintScreenTimeout;
}

public boolean getHintShowBackgroundOnly()
{
    return (mFlags & 4) != 0;
}

/** Returns the live (mutable) list of additional pages. */
public List getPages()
{
    return mPages;
}

public boolean getStartScrollBottom()
{
    return (mFlags & 8) != 0;
}
public NotificationCompat$WearableExtender setBackground(Bitmap bitmap)
{
mBackground = bitmap;
// 0 0:aload_0
// 1 1:aload_1
// 2 2:putfield #199 <Field Bitmap mBackground>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setBridgeTag(String s)
{
mBridgeTag = s;
// 0 0:aload_0
// 1 1:aload_1
// 2 2:putfield #216 <Field String mBridgeTag>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setContentAction(int i)
{
mContentActionIndex = i;
// 0 0:aload_0
// 1 1:iload_1
// 2 2:putfield #124 <Field int mContentActionIndex>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setContentIcon(int i)
{
mContentIcon = i;
// 0 0:aload_0
// 1 1:iload_1
// 2 2:putfield #204 <Field int mContentIcon>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setContentIconGravity(int i)
{
mContentIconGravity = i;
// 0 0:aload_0
// 1 1:iload_1
// 2 2:putfield #122 <Field int mContentIconGravity>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setContentIntentAvailableOffline(boolean flag)
{
setFlag(1, flag);
// 0 0:aload_0
// 1 1:iconst_1
// 2 2:iload_1
// 3 3:invokespecial #417 <Method void setFlag(int, boolean)>
return this;
// 4 6:aload_0
// 5 7:areturn
}
public NotificationCompat$WearableExtender setCustomContentHeight(int i)
{
mCustomContentHeight = i;
// 0 0:aload_0
// 1 1:iload_1
// 2 2:putfield #206 <Field int mCustomContentHeight>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setCustomSizePreset(int i)
{
mCustomSizePreset = i;
// 0 0:aload_0
// 1 1:iload_1
// 2 2:putfield #126 <Field int mCustomSizePreset>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setDismissalId(String s)
{
mDismissalId = s;
// 0 0:aload_0
// 1 1:aload_1
// 2 2:putfield #214 <Field String mDismissalId>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setDisplayIntent(PendingIntent pendingintent)
{
mDisplayIntent = pendingintent;
// 0 0:aload_0
// 1 1:aload_1
// 2 2:putfield #191 <Field PendingIntent mDisplayIntent>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setGravity(int i)
{
mGravity = i;
// 0 0:aload_0
// 1 1:iload_1
// 2 2:putfield #128 <Field int mGravity>
return this;
// 3 5:aload_0
// 4 6:areturn
}
public NotificationCompat$WearableExtender setHintAmbientBigPicture(boolean flag)
{
setFlag(32, flag);
// 0 0:aload_0
// 1 1:bipush 32
// 2 3:iload_1
// 3 4:invokespecial #417 <Method void setFlag(int, boolean)>
return this;
// 4 7:aload_0
// 5 8:areturn
}
public NotificationCompat$WearableExtender setHintAvoidBackgroundClipping(boolean flag)
{
setFlag(16, flag);
// 0 0:aload_0
// 1 1:bipush 16
// 2 3:iload_1
// 3 4:invokespecial #417 <Method void setFlag(int, boolean)>
return this;
// 4 7:aload_0
// 5 8:areturn
}
/**
 * Sets the "content intent launches an activity" hint bit. Builder-style:
 * returns this extender for chaining.
 */
public NotificationCompat$WearableExtender setHintContentIntentLaunchesActivity(boolean flag)
{
    // Same bit as the decompiler's raw literal 64, via the named constant.
    setFlag(FLAG_HINT_CONTENT_INTENT_LAUNCHES_ACTIVITY, flag);
    return this;
}
/**
 * Sets the "hide icon" hint bit. Builder-style: returns this extender for
 * chaining.
 */
public NotificationCompat$WearableExtender setHintHideIcon(boolean flag)
{
    // Same bit as the decompiler's raw literal 2, via the named constant.
    setFlag(FLAG_HINT_HIDE_ICON, flag);
    return this;
}
/**
 * Sets the screen-timeout hint (a duration, or one of the SCREEN_TIMEOUT_*
 * constants declared on this class). Builder-style: returns this extender
 * for chaining.
 */
public NotificationCompat$WearableExtender setHintScreenTimeout(int i)
{
    this.mHintScreenTimeout = i;
    return this;
}
/**
 * Sets the "show background only" hint bit. Builder-style: returns this
 * extender for chaining.
 */
public NotificationCompat$WearableExtender setHintShowBackgroundOnly(boolean flag)
{
    // Same bit as the decompiler's raw literal 4, via the named constant.
    setFlag(FLAG_HINT_SHOW_BACKGROUND_ONLY, flag);
    return this;
}
/**
 * Sets the "start scrolled to the bottom" flag bit. Builder-style: returns
 * this extender for chaining.
 */
public NotificationCompat$WearableExtender setStartScrollBottom(boolean flag)
{
    // Same bit as the decompiler's raw literal 8, via the named constant.
    setFlag(FLAG_START_SCROLL_BOTTOM, flag);
    return this;
}
// Default values applied by the constructors and used as getInt() fallbacks
// when reading the wearable extras bundle.
private static final int DEFAULT_CONTENT_ICON_GRAVITY = 0x800005;
private static final int DEFAULT_FLAGS = 1;
private static final int DEFAULT_GRAVITY = 80;
// Key under which the wearable options Bundle is stored in the notification
// extras (read by the Notification constructor below).
private static final String EXTRA_WEARABLE_EXTENSIONS = "android.wearable.EXTENSIONS";
// Bit values packed into mFlags via setFlag().
private static final int FLAG_BIG_PICTURE_AMBIENT = 32;
private static final int FLAG_CONTENT_INTENT_AVAILABLE_OFFLINE = 1;
private static final int FLAG_HINT_AVOID_BACKGROUND_CLIPPING = 16;
private static final int FLAG_HINT_CONTENT_INTENT_LAUNCHES_ACTIVITY = 64;
private static final int FLAG_HINT_HIDE_ICON = 2;
private static final int FLAG_HINT_SHOW_BACKGROUND_ONLY = 4;
private static final int FLAG_START_SCROLL_BOTTOM = 8;
// Keys within the EXTRA_WEARABLE_EXTENSIONS bundle.
private static final String KEY_ACTIONS = "actions";
private static final String KEY_BACKGROUND = "background";
private static final String KEY_BRIDGE_TAG = "bridgeTag";
private static final String KEY_CONTENT_ACTION_INDEX = "contentActionIndex";
private static final String KEY_CONTENT_ICON = "contentIcon";
private static final String KEY_CONTENT_ICON_GRAVITY = "contentIconGravity";
private static final String KEY_CUSTOM_CONTENT_HEIGHT = "customContentHeight";
private static final String KEY_CUSTOM_SIZE_PRESET = "customSizePreset";
private static final String KEY_DISMISSAL_ID = "dismissalId";
private static final String KEY_DISPLAY_INTENT = "displayIntent";
private static final String KEY_FLAGS = "flags";
private static final String KEY_GRAVITY = "gravity";
private static final String KEY_HINT_SCREEN_TIMEOUT = "hintScreenTimeout";
private static final String KEY_PAGES = "pages";
// Public constants callers may pass to the corresponding setters.
public static final int SCREEN_TIMEOUT_LONG = -1;
public static final int SCREEN_TIMEOUT_SHORT = 0;
public static final int SIZE_DEFAULT = 0;
public static final int SIZE_FULL_SCREEN = 5;
public static final int SIZE_LARGE = 4;
public static final int SIZE_MEDIUM = 3;
public static final int SIZE_SMALL = 2;
public static final int SIZE_XSMALL = 1;
public static final int UNSET_ACTION_INDEX = -1;
// Instance state. The raw ArrayList types are decompiler output; presumably
// mActions holds NotificationCompat$Action and mPages holds Notification
// elements (see the parsing constructor) -- TODO confirm against the
// original support-library source.
private ArrayList mActions;
private Bitmap mBackground;
private String mBridgeTag;
private int mContentActionIndex;
private int mContentIcon;
private int mContentIconGravity;
private int mCustomContentHeight;
private int mCustomSizePreset;
private String mDismissalId;
private PendingIntent mDisplayIntent;
private int mFlags;
private int mGravity;
private int mHintScreenTimeout;
private ArrayList mPages;
/**
 * Creates a WearableExtender with every option at its default value.
 * The defaults match the DEFAULT_* / SIZE_DEFAULT / UNSET_ACTION_INDEX
 * constants declared on this class.
 */
public NotificationCompat$WearableExtender()
{
    mActions = new ArrayList();
    mPages = new ArrayList();
    mFlags = DEFAULT_FLAGS;
    mContentIconGravity = DEFAULT_CONTENT_ICON_GRAVITY;
    mContentActionIndex = UNSET_ACTION_INDEX;
    mCustomSizePreset = SIZE_DEFAULT;
    mGravity = DEFAULT_GRAVITY;
}
public NotificationCompat$WearableExtender(Notification notification)
{
// 0 0:aload_0
// 1 1:invokespecial #111 <Method void Object()>
mActions = new ArrayList();
// 2 4:aload_0
// 3 5:new #113 <Class ArrayList>
// 4 8:dup
// 5 9:invokespecial #114 <Method void ArrayList()>
// 6 12:putfield #116 <Field ArrayList mActions>
mFlags = 1;
// 7 15:aload_0
// 8 16:iconst_1
// 9 17:putfield #118 <Field int mFlags>
mPages = new ArrayList();
// 10 20:aload_0
// 11 21:new #113 <Class ArrayList>
// 12 24:dup
// 13 25:invokespecial #114 <Method void ArrayList()>
// 14 28:putfield #120 <Field ArrayList mPages>
mContentIconGravity = 0x800005;
// 15 31:aload_0
// 16 32:ldc1 #12 <Int 0x800005>
// 17 34:putfield #122 <Field int mContentIconGravity>
mContentActionIndex = -1;
// 18 37:aload_0
// 19 38:iconst_m1
// 20 39:putfield #124 <Field int mContentActionIndex>
mCustomSizePreset = 0;
// 21 42:aload_0
// 22 43:iconst_0
// 23 44:putfield #126 <Field int mCustomSizePreset>
mGravity = 80;
// 24 47:aload_0
// 25 48:bipush 80
// 26 50:putfield #128 <Field int mGravity>
notification = ((Notification) (NotificationCompat.getExtras(notification)));
// 27 53:aload_1
// 28 54:invokestatic #134 <Method Bundle NotificationCompat.getExtras(Notification)>
// 29 57:astore_1
if(notification != null)
//* 30 58:aload_1
//* 31 59:ifnull 72
notification = ((Notification) (((Bundle) (notification)).getBundle("android.wearable.EXTENSIONS")));
// 32 62:aload_1
// 33 63:ldc1 #20 <String "android.wearable.EXTENSIONS">
// 34 65:invokevirtual #140 <Method Bundle Bundle.getBundle(String)>
// 35 68:astore_1
else
//* 36 69:goto 74
notification = null;
// 37 72:aconst_null
// 38 73:astore_1
if(notification != null)
//* 39 74:aload_1
//* 40 75:ifnull 337
{
ArrayList arraylist = ((Bundle) (notification)).getParcelableArrayList("actions");
// 41 78:aload_1
// 42 79:ldc1 #36 <String "actions">
// 43 81:invokevirtual #144 <Method ArrayList Bundle.getParcelableArrayList(String)>
// 44 84:astore_3
if(android.os.Build.VERSION.SDK_INT >= 16 && arraylist != null)
//* 45 85:getstatic #149 <Field int android.os.Build$VERSION.SDK_INT>
//* 46 88:bipush 16
//* 47 90:icmplt 184
//* 48 93:aload_3
//* 49 94:ifnull 184
{
NotificationCompat.Action aaction[] = new NotificationCompat.Action[arraylist.size()];
// 50 97:aload_3
// 51 98:invokevirtual #153 <Method int ArrayList.size()>
// 52 101:anewarray NotificationCompat.Action[]
// 53 104:astore 4
for(int i = 0; i < aaction.length; i++)
//* 54 106:iconst_0
//* 55 107:istore_2
//* 56 108:iload_2
//* 57 109:aload 4
//* 58 111:arraylength
//* 59 112:icmpge 171
{
if(android.os.Build.VERSION.SDK_INT >= 20)
//* 60 115:getstatic #149 <Field int android.os.Build$VERSION.SDK_INT>
//* 61 118:bipush 20
//* 62 120:icmplt 141
{
aaction[i] = NotificationCompat.getActionCompatFromAction((android.app.Notification.Action)arraylist.get(i));
// 63 123:aload 4
// 64 125:iload_2
// 65 126:aload_3
// 66 127:iload_2
// 67 128:invokevirtual #159 <Method Object ArrayList.get(int)>
// 68 131:checkcast #161 <Class android.app.Notification$Action>
// 69 134:invokestatic #165 <Method NotificationCompat$Action NotificationCompat.getActionCompatFromAction(android.app.Notification$Action)>
// 70 137:aastore
continue;
// 71 138:goto 164
}
if(android.os.Build.VERSION.SDK_INT >= 16)
//* 72 141:getstatic #149 <Field int android.os.Build$VERSION.SDK_INT>
//* 73 144:bipush 16
//* 74 146:icmplt 164
aaction[i] = NotificationCompatJellybean.getActionFromBundle((Bundle)arraylist.get(i));
// 75 149:aload 4
// 76 151:iload_2
// 77 152:aload_3
// 78 153:iload_2
// 79 154:invokevirtual #159 <Method Object ArrayList.get(int)>
// 80 157:checkcast #136 <Class Bundle>
// 81 160:invokestatic #171 <Method NotificationCompat$Action NotificationCompatJellybean.getActionFromBundle(Bundle)>
// 82 163:aastore
}
// 83 164:iload_2
// 84 165:iconst_1
// 85 166:iadd
// 86 167:istore_2
//* 87 168:goto 108
Collections.addAll(((java.util.Collection) (mActions)), ((Object []) ((NotificationCompat.Action[])aaction)));
// 88 171:aload_0
// 89 172:getfield #116 <Field ArrayList mActions>
// 90 175:aload 4
// 91 177:checkcast #173 <Class NotificationCompat$Action[]>
// 92 180:invokestatic #179 <Method boolean Collections.addAll(java.util.Collection, Object[])>
// 93 183:pop
}
mFlags = ((Bundle) (notification)).getInt("flags", 1);
// 94 184:aload_0
// 95 185:aload_1
// 96 186:ldc1 #66 <String "flags">
// 97 188:iconst_1
// 98 189:invokevirtual #183 <Method int Bundle.getInt(String, int)>
// 99 192:putfield #118 <Field int mFlags>
mDisplayIntent = (PendingIntent)((Bundle) (notification)).getParcelable("displayIntent");
// 100 195:aload_0
// 101 196:aload_1
// 102 197:ldc1 #63 <String "displayIntent">
// 103 199:invokevirtual #187 <Method android.os.Parcelable Bundle.getParcelable(String)>
// 104 202:checkcast #189 <Class PendingIntent>
// 105 205:putfield #191 <Field PendingIntent mDisplayIntent>
Notification anotification[] = NotificationCompat.getNotificationArrayFromBundle(((Bundle) (notification)), "pages");
// 106 208:aload_1
// 107 209:ldc1 #75 <String "pages">
// 108 211:invokestatic #195 <Method Notification[] NotificationCompat.getNotificationArrayFromBundle(Bundle, String)>
// 109 214:astore_3
if(anotification != null)
//* 110 215:aload_3
//* 111 216:ifnull 228
Collections.addAll(((java.util.Collection) (mPages)), ((Object []) (anotification)));
// 112 219:aload_0
// 113 220:getfield #120 <Field ArrayList mPages>
// 114 223:aload_3
// 115 224:invokestatic #179 <Method boolean Collections.addAll(java.util.Collection, Object[])>
// 116 227:pop
mBackground = (Bitmap)((Bundle) (notification)).getParcelable("background");
// 117 228:aload_0
// 118 229:aload_1
// 119 230:ldc1 #39 <String "background">
// 120 232:invokevirtual #187 <Method android.os.Parcelable Bundle.getParcelable(String)>
// 121 235:checkcast #197 <Class Bitmap>
// 122 238:putfield #199 <Field Bitmap mBackground>
mContentIcon = ((Bundle) (notification)).getInt("contentIcon");
// 123 241:aload_0
// 124 242:aload_1
// 125 243:ldc1 #48 <String "contentIcon">
// 126 245:invokevirtual #202 <Method int Bundle.getInt(String)>
// 127 248:putfield #204 <Field int mContentIcon>
mContentIconGravity = ((Bundle) (notification)).getInt("contentIconGravity", 0x800005);
// 128 251:aload_0
// 129 252:aload_1
// 130 253:ldc1 #51 <String "contentIconGravity">
// 131 255:ldc1 #12 <Int 0x800005>
// 132 257:invokevirtual #183 <Method int Bundle.getInt(String, int)>
// 133 260:putfield #122 <Field int mContentIconGravity>
mContentActionIndex = ((Bundle) (notification)).getInt("contentActionIndex", -1);
// 134 263:aload_0
// 135 264:aload_1
// 136 265:ldc1 #45 <String "contentActionIndex">
// 137 267:iconst_m1
// 138 268:invokevirtual #183 <Method int Bundle.getInt(String, int)>
// 139 271:putfield #124 <Field int mContentActionIndex>
mCustomSizePreset = ((Bundle) (notification)).getInt("customSizePreset", 0);
// 140 274:aload_0
// 141 275:aload_1
// 142 276:ldc1 #57 <String "customSizePreset">
// 143 278:iconst_0
// 144 279:invokevirtual #183 <Method int Bundle.getInt(String, int)>
// 145 282:putfield #126 <Field int mCustomSizePreset>
mCustomContentHeight = ((Bundle) (notification)).getInt("customContentHeight");
// 146 285:aload_0
// 147 286:aload_1
// 148 287:ldc1 #54 <String "customContentHeight">
// 149 289:invokevirtual #202 <Method int Bundle.getInt(String)>
// 150 292:putfield #206 <Field int mCustomContentHeight>
mGravity = ((Bundle) (notification)).getInt("gravity", 80);
// 151 295:aload_0
// 152 296:aload_1
// 153 297:ldc1 #69 <String "gravity">
// 154 299:bipush 80
// 155 301:invokevirtual #183 <Method int Bundle.getInt(String, int)>
// 156 304:putfield #128 <Field int mGravity>
mHintScreenTimeout = ((Bundle) (notification)).getInt("hintScreenTimeout");
// 157 307:aload_0
// 158 308:aload_1
// 159 309:ldc1 #72 <String "hintScreenTimeout">
// 160 311:invokevirtual #202 <Method int Bundle.getInt(String)>
// 161 314:putfield #208 <Field int mHintScreenTimeout>
mDismissalId = ((Bundle) (notification)).getString("dismissalId");
// 162 317:aload_0
// 163 318:aload_1
// 164 319:ldc1 #60 <String "dismissalId">
// 165 321:invokevirtual #212 <Method String Bundle.getString(String)>
// 166 324:putfield #214 <Field String mDismissalId>
mBridgeTag = ((Bundle) (notification)).getString("bridgeTag");
// 167 327:aload_0
// 168 328:aload_1
// 169 329:ldc1 #42 <String "bridgeTag">
// 170 331:invokevirtual #212 <Method String Bundle.getString(String)>
// 171 334:putfield #216 <Field String mBridgeTag>
}
// 172 337:return
}
}
|
wilebeast/FireFox-OS | B2G/gecko/mobile/android/base/httpclientandroidlib/client/protocol/RequestAddCookies.java | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package ch.boye.httpclientandroidlib.client.protocol;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import ch.boye.httpclientandroidlib.annotation.Immutable;
import ch.boye.httpclientandroidlib.androidextra.HttpClientAndroidLog;
/* LogFactory removed by HttpClient for Android script. */
import ch.boye.httpclientandroidlib.Header;
import ch.boye.httpclientandroidlib.HttpException;
import ch.boye.httpclientandroidlib.HttpHost;
import ch.boye.httpclientandroidlib.HttpRequest;
import ch.boye.httpclientandroidlib.HttpRequestInterceptor;
import ch.boye.httpclientandroidlib.ProtocolException;
import ch.boye.httpclientandroidlib.client.CookieStore;
import ch.boye.httpclientandroidlib.client.methods.HttpUriRequest;
import ch.boye.httpclientandroidlib.client.params.HttpClientParams;
import ch.boye.httpclientandroidlib.conn.HttpRoutedConnection;
import ch.boye.httpclientandroidlib.conn.routing.HttpRoute;
import ch.boye.httpclientandroidlib.cookie.Cookie;
import ch.boye.httpclientandroidlib.cookie.CookieOrigin;
import ch.boye.httpclientandroidlib.cookie.CookieSpec;
import ch.boye.httpclientandroidlib.cookie.CookieSpecRegistry;
import ch.boye.httpclientandroidlib.cookie.SetCookie2;
import ch.boye.httpclientandroidlib.protocol.HttpContext;
import ch.boye.httpclientandroidlib.protocol.ExecutionContext;
/**
* Request interceptor that matches cookies available in the current
* {@link CookieStore} to the request being executed and generates
* corresponding <code>Cookie</code> request headers.
* <p>
* The following parameters can be used to customize the behavior of this
* class:
* <ul>
* <li>{@link ch.boye.httpclientandroidlib.cookie.params.CookieSpecPNames#DATE_PATTERNS}</li>
* <li>{@link ch.boye.httpclientandroidlib.cookie.params.CookieSpecPNames#SINGLE_COOKIE_HEADER}</li>
* <li>{@link ch.boye.httpclientandroidlib.client.params.ClientPNames#COOKIE_POLICY}</li>
* </ul>
*
* @since 4.0
*/
@Immutable
public class RequestAddCookies implements HttpRequestInterceptor {

    public HttpClientAndroidLog log = new HttpClientAndroidLog(getClass());

    public RequestAddCookies() {
        super();
    }

    /**
     * Matches cookies from the {@link CookieStore} in the context against the
     * request's origin and adds the corresponding <code>Cookie</code> (and,
     * if needed, <code>Cookie2</code> version) headers to the request.
     * Silently does nothing if any required context attribute is missing.
     */
    public void process(final HttpRequest request, final HttpContext context)
            throws HttpException, IOException {
        if (request == null) {
            throw new IllegalArgumentException("HTTP request may not be null");
        }
        if (context == null) {
            throw new IllegalArgumentException("HTTP context may not be null");
        }

        String method = request.getRequestLine().getMethod();
        if (method.equalsIgnoreCase("CONNECT")) {
            // Tunnel establishment; cookies belong to the tunneled requests.
            return;
        }

        // Obtain cookie store
        CookieStore cookieStore = (CookieStore) context.getAttribute(
                ClientContext.COOKIE_STORE);
        if (cookieStore == null) {
            this.log.debug("Cookie store not specified in HTTP context");
            return;
        }

        // Obtain the registry of cookie specs
        CookieSpecRegistry registry = (CookieSpecRegistry) context.getAttribute(
                ClientContext.COOKIESPEC_REGISTRY);
        if (registry == null) {
            this.log.debug("CookieSpec registry not specified in HTTP context");
            return;
        }

        // Obtain the target host (required)
        HttpHost targetHost = (HttpHost) context.getAttribute(
                ExecutionContext.HTTP_TARGET_HOST);
        if (targetHost == null) {
            this.log.debug("Target host not set in the context");
            return;
        }

        // Obtain the client connection (required)
        HttpRoutedConnection conn = (HttpRoutedConnection) context.getAttribute(
                ExecutionContext.HTTP_CONNECTION);
        if (conn == null) {
            this.log.debug("HTTP connection not set in the context");
            return;
        }

        String policy = HttpClientParams.getCookiePolicy(request.getParams());
        if (this.log.isDebugEnabled()) {
            this.log.debug("CookieSpec selected: " + policy);
        }

        URI requestURI;
        if (request instanceof HttpUriRequest) {
            requestURI = ((HttpUriRequest) request).getURI();
        } else {
            try {
                requestURI = new URI(request.getRequestLine().getUri());
            } catch (URISyntaxException ex) {
                throw new ProtocolException("Invalid request URI: " +
                        request.getRequestLine().getUri(), ex);
            }
        }

        String hostName = targetHost.getHostName();
        int port = targetHost.getPort();
        if (port < 0) {
            HttpRoute route = conn.getRoute();
            if (route.getHopCount() == 1) {
                // Direct connection: the remote port of the connection is
                // the actual target port.
                port = conn.getRemotePort();
            } else {
                // Target port will be selected by the proxy.
                // Use conventional ports for known schemes
                String scheme = targetHost.getSchemeName();
                if (scheme.equalsIgnoreCase("http")) {
                    port = 80;
                } else if (scheme.equalsIgnoreCase("https")) {
                    port = 443;
                } else {
                    port = 0;
                }
            }
        }

        CookieOrigin cookieOrigin = new CookieOrigin(
                hostName,
                port,
                requestURI.getPath(),
                conn.isSecure());

        // Get an instance of the selected cookie policy
        CookieSpec cookieSpec = registry.getCookieSpec(policy, request.getParams());
        // Get all cookies available in the HTTP state
        List<Cookie> cookies = new ArrayList<Cookie>(cookieStore.getCookies());
        // Find cookies matching the given origin
        List<Cookie> matchedCookies = new ArrayList<Cookie>();
        Date now = new Date();
        for (Cookie cookie : cookies) {
            if (!cookie.isExpired(now)) {
                if (cookieSpec.match(cookie, cookieOrigin)) {
                    if (this.log.isDebugEnabled()) {
                        this.log.debug("Cookie " + cookie + " match " + cookieOrigin);
                    }
                    matchedCookies.add(cookie);
                }
            } else {
                if (this.log.isDebugEnabled()) {
                    this.log.debug("Cookie " + cookie + " expired");
                }
            }
        }

        // Generate Cookie request headers
        if (!matchedCookies.isEmpty()) {
            List<Header> headers = cookieSpec.formatCookies(matchedCookies);
            for (Header header : headers) {
                request.addHeader(header);
            }
        }

        int ver = cookieSpec.getVersion();
        if (ver > 0) {
            boolean needVersionHeader = false;
            for (Cookie cookie : matchedCookies) {
                if (ver != cookie.getVersion() || !(cookie instanceof SetCookie2)) {
                    needVersionHeader = true;
                    break; // one mismatch is enough; no need to scan the rest
                }
            }
            if (needVersionHeader) {
                Header header = cookieSpec.getVersionHeader();
                if (header != null) {
                    // Advertise cookie version support
                    request.addHeader(header);
                }
            }
        }

        // Stick the CookieSpec and CookieOrigin instances to the HTTP context
        // so they could be obtained by the response interceptor
        context.setAttribute(ClientContext.COOKIE_SPEC, cookieSpec);
        context.setAttribute(ClientContext.COOKIE_ORIGIN, cookieOrigin);
    }

}
|
biswain2309/amadeus-code-examples | airport_and_city_search/v1/get/locations_locationsid/Node SDK/airport_and_city_search.js | <reponame>biswain2309/amadeus-code-examples
var Amadeus = require("amadeus");

// Authenticate against the Amadeus Self-Service APIs.
var amadeus = new Amadeus({
    clientId: 'YOUR_API_KEY',
    clientSecret: 'YOUR_API_SECRET'
});

// Which cities or airports start with 'r'?
amadeus.referenceData.locations
    .get({
        keyword: 'r',
        subType: Amadeus.location.any
    })
    .then(function (response) {
        console.log(response);
    })
    .catch(function (response) {
        console.error(response);
    });
luyang1210/arx | src/main/org/deidentifier/arx/aggregates/HierarchyBuilderOrderBased.java | /*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2016 <NAME>, <NAME> and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.aggregates;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.deidentifier.arx.DataType;
/**
* This class enables building hierarchies for categorical and non-categorical values
* by ordering the data items and merging into groups with predefined sizes.
*
* @author <NAME>
* @param <T>
*/
public class HierarchyBuilderOrderBased<T> extends HierarchyBuilderGroupingBased<T> {

    /**
     * A serializable comparator.
     *
     * @author <NAME>
     * @param <T>
     */
    public static abstract class SerializableComparator<T> implements Comparator<T>, Serializable {
        /** Serial version UID. */
        private static final long serialVersionUID = 3851134667082727602L;
    }

    /**
     * A group of adjacent ("close") values on one level of the hierarchy.
     * The group's label is produced by applying an aggregate function to the
     * contained values.
     *
     * @param <T>
     */
    @SuppressWarnings("hiding")
    protected class CloseElements<T> extends AbstractGroup {

        /** Serial version UID. */
        private static final long serialVersionUID = 7224062023293601561L;

        /** The raw values contained in this group. */
        private String[] values;

        /**
         * Creates a new group; its label is the aggregate of the given values.
         *
         * @param values the values merged into this group
         * @param function the aggregate function used to derive the label
         */
        protected CloseElements(String[] values, AggregateFunction<T> function) {
            super(function.aggregate(values));
            this.values = values;
        }

        /**
         * Returns the values contained in this group.
         *
         * @return the contained values
         */
        protected String[] getValues(){
            return values;
        }

        /**
         * Merges the given groups into a single new group containing all of
         * their values, labeled via the given aggregate function.
         *
         * @param list the groups to merge
         * @param function the aggregate function for the merged group's label
         * @return the merged group
         */
        @SuppressWarnings("rawtypes")
        protected CloseElements merge(List<CloseElements<T>> list, AggregateFunction<T> function) {
            List<String> values = new ArrayList<String>();
            for (CloseElements group : list){
                for (String s : ((CloseElements)group).getValues()) {
                    values.add(s);
                }
            }
            return new CloseElements<T>(values.toArray(new String[values.size()]), function);
        }
    }

    /** Serial version UID. */
    private static final long serialVersionUID = -2749758635401073668L;

    /**
     * Creates a new instance. Either preserves the given order, or
     * sorts the items according to the order induced by the given data type
     *
     * @param <T>
     * @param type The data type is also used for ordering data items
     * @param order Should the items be sorted according to the order induced by the data type
     * @return a new builder instance
     */
    public static <T> HierarchyBuilderOrderBased<T> create(final DataType<T> type, boolean order) {
        return new HierarchyBuilderOrderBased<T>(type, order);
    }

    /**
     * Creates a new instance. Uses the comparator for ordering data items
     *
     * @param <T>
     * @param type The data type
     * @param comparator Use this comparator for ordering data items
     * @return a new builder instance
     */
    public static <T> HierarchyBuilderOrderBased<T> create(final DataType<T> type, final Comparator<T> comparator) {
        return new HierarchyBuilderOrderBased<T>(type, comparator);
    }

    /**
     * Creates a new instance. Uses the defined order for data items
     *
     * @param <T>
     * @param type The data type
     * @param order Use this for ordering data items
     * @return a new builder instance
     */
    public static <T> HierarchyBuilderOrderBased<T> create(final DataType<T> type, final String[] order) {
        return new HierarchyBuilderOrderBased<T>(type, order);
    }

    /**
     * Loads a builder specification from the given file.
     *
     * @param <T>
     * @param file the file to deserialize the builder from
     * @return the deserialized builder
     * @throws IOException if reading or deserialization fails
     */
    @SuppressWarnings("unchecked")
    public static <T> HierarchyBuilderOrderBased<T> create(File file) throws IOException{
        ObjectInputStream ois = null;
        try {
            ois = new ObjectInputStream(new FileInputStream(file));
            HierarchyBuilderOrderBased<T> result = (HierarchyBuilderOrderBased<T>)ois.readObject();
            return result;
        } catch (Exception e) {
            // Wrap any deserialization problem (e.g. ClassNotFoundException)
            // into the declared IOException
            throw new IOException(e);
        } finally {
            if (ois != null) ois.close();
        }
    }

    /**
     * Loads a builder specification from the given file.
     *
     * @param <T>
     * @param file path of the file to deserialize the builder from
     * @return the deserialized builder
     * @throws IOException if reading or deserialization fails
     */
    public static <T> HierarchyBuilderOrderBased<T> create(String file) throws IOException{
        return create(new File(file));
    }

    /** Comparator used to order the data items, or null to preserve the input order. */
    private final Comparator<String> comparator;

    /**
     * Creates a new instance.
     *
     * @param type The data type is also used for ordering data items
     * @param order Should the items be sorted according to the order induced by the data type
     */
    private HierarchyBuilderOrderBased(final DataType<T> type, boolean order) {
        super(Type.ORDER_BASED, type);
        if (order) {
            // Order induced by the data type's own compare()
            this.comparator = new SerializableComparator<String>(){
                private static final long serialVersionUID = -5728888259809544706L;
                @Override
                public int compare(String o1, String o2) {
                    try {
                        return type.compare(o1, o2);
                    } catch (Exception e) {
                        throw new IllegalArgumentException(e);
                    }
                }
            };
        } else {
            // null means: keep the data in its given order
            this.comparator = null;
        }
        this.function = AggregateFunction.forType(type).createSetFunction();
    }

    /**
     * Creates a new instance.
     *
     * @param type The data type
     * @param comparator Use this comparator for ordering data items
     */
    private HierarchyBuilderOrderBased(final DataType<T> type, final Comparator<T> comparator) {
        super(Type.ORDER_BASED, type);
        // The builder itself is serializable, so the comparator must be too
        if (!(comparator instanceof Serializable)) {
            throw new IllegalArgumentException("Comparator must be serializable");
        }
        // Adapt the typed comparator to the String representation by parsing
        this.comparator = new SerializableComparator<String>(){
            private static final long serialVersionUID = -487411642974218418L;
            @Override
            public int compare(String o1, String o2) {
                try {
                    return comparator.compare(type.parse(o1), type.parse(o2));
                } catch (Exception e) {
                    throw new IllegalArgumentException(e);
                }
            }
        };
        this.function = AggregateFunction.forType(type).createSetFunction();
    }

    /**
     * Creates a new instance.
     *
     * @param type The data type
     * @param order Use this for ordering data items
     */
    private HierarchyBuilderOrderBased(final DataType<T> type, final String[] order) {
        super(Type.ORDER_BASED, type);
        // Map each value to its rank in the explicit order
        final Map<String, Integer> map = new HashMap<String, Integer>();
        for (int i=0; i<order.length; i++) {
            map.put(order[i], i);
        }
        this.comparator = new SerializableComparator<String>(){
            private static final long serialVersionUID = 8016783606581696832L;
            @Override
            public int compare(String o1, String o2) {
                try {
                    // Throws (as IllegalArgumentException) for values not in the order
                    return map.get(o1).compareTo(map.get(o2));
                } catch (Exception e) {
                    throw new IllegalArgumentException(e);
                }
            }
        };
        this.function = AggregateFunction.forType(type).createSetFunction();
    }

    /**
     * Returns the comparator.
     *
     * @return the comparator, or null if the input order is preserved
     */
    public Comparator<String> getComparator(){
        return comparator;
    }

    @SuppressWarnings("unchecked")
    @Override
    protected AbstractGroup[][] prepareGroups() {

        // Sort the data items first, if a comparator has been configured
        if (comparator != null) {
            try {
                Arrays.sort(super.getData(), comparator);
            } catch (Exception e){
                throw new IllegalArgumentException(e.getMessage());
            }
        }

        List<Group<T>> groups = super.getLevel(0).getGroups();
        List<String> items = new ArrayList<String>();

        // Prepare
        String[] data = getData();
        List<AbstractGroup[]> result = new ArrayList<AbstractGroup[]>();
        int index = 0;
        int resultIndex = 0;
        int groupCount = 0;

        // Break if no groups specified
        if (!super.getLevels().isEmpty() &&
            !super.getLevel(0).getGroups().isEmpty()) {

            // Create first column: cycle through the level-0 group definitions,
            // consuming group.getSize() items per group until the data is exhausted
            AbstractGroup[] first = new AbstractGroup[data.length];
            outer: while (true) {
                for (Group<T> group : groups) {
                    for (int i = 0; i<group.getSize(); i++){
                        items.add(data[index++]);
                        if (index == data.length) break;
                    }
                    CloseElements<T> element = new CloseElements<T>(items.toArray(new String[items.size()]), group.getFunction());
                    // Every consumed item maps to the same group in this column
                    for (int i=0; i<items.size(); i++) {
                        first[resultIndex++] = element;
                    }
                    groupCount++;
                    items.clear();
                    if (index == data.length) break outer;
                }
            }

            result.add(first);

            // Break if done
            if (groupCount>1) {

                // Build higher-level columns
                for (int i=1; i<super.getLevels().size(); i++){

                    // Break if done
                    if (groupCount==1) break;

                    // Prepare: collect the distinct groups of the previous column,
                    // preserving their first-occurrence order
                    groupCount = 0;
                    groups = super.getLevel(i).getGroups();
                    Map<AbstractGroup, AbstractGroup> map = new HashMap<AbstractGroup, AbstractGroup>();
                    List<AbstractGroup> list = new ArrayList<AbstractGroup>();
                    AbstractGroup[] column = result.get(i-1);
                    for (int j=0; j<column.length; j++){
                        if (!map.containsKey(column[j])) {
                            map.put(column[j], column[j]);
                            list.add(column[j]);
                        }
                    }

                    // Build: merge runs of previous-level groups according to
                    // this level's group sizes
                    index = 0;
                    resultIndex = 0;
                    List<CloseElements<T>> gItems = new ArrayList<CloseElements<T>>();
                    outer: while (true) {
                        for (Group<T> group : groups) {
                            for (int j = 0; j<group.getSize(); j++){
                                gItems.add((CloseElements<T>)list.get(index++));
                                if (index == list.size()) break;
                            }
                            CloseElements<T> element = gItems.get(0).merge(gItems, group.getFunction());
                            groupCount++;
                            // Redirect each merged child group to its new parent
                            for (int j=0; j<gItems.size(); j++) {
                                map.put(gItems.get(j), element);
                            }
                            gItems.clear();
                            if (index == list.size()) break outer;
                        }
                    }

                    // Store: translate the previous column through the merge map
                    AbstractGroup[] ccolumn = new AbstractGroup[data.length];
                    for (int j=0; j<column.length; j++){
                        ccolumn[j] = map.get(column[j]);
                    }
                    result.add(ccolumn);
                }
            }
        } else {
            groupCount = data.length;
        }

        // Add one last column if more than one group left: a single root group
        // labeled with the "any value" constant
        if (groupCount>1) {
            AbstractGroup[] column = new AbstractGroup[data.length];
            CloseElements<T> element = new CloseElements<T>(new String[]{}, AggregateFunction.forType(getDataType()).createConstantFunction(DataType.ANY_VALUE));
            for (int i=0; i<column.length; i++){
                column[i] = element;
            }
            result.add(column);
        }

        // Return
        return result.toArray(new AbstractGroup[0][0]);
    }
}
|
danx0r/mongo | src/mongo/db/s/implicit_create_collection_test.cpp | /**
* Copyright (C) 2018 MongoDB Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* As a special exception, the copyright holders give permission to link the
* code of portions of this program with the OpenSSL library under certain
* conditions as described in each individual source file and distribute
* linked combinations including the program with the OpenSSL library. You
* must comply with the GNU Affero General Public License in all respects
* for all of the code used other than as permitted herein. If you modify
* file(s) with this exception, you may extend this exception to your
* version of the file(s), but you are not obligated to do so. If you do not
* wish to do so, delete this exception statement from your version. If you
* delete this exception statement from all source files in the program,
* then also delete it in the license file.
*/
#include "mongo/platform/basic.h"
#include "mongo/bson/bsonobjbuilder.h"
#include "mongo/client/remote_command_targeter_mock.h"
#include "mongo/db/client.h"
#include "mongo/db/commands.h"
#include "mongo/db/dbdirectclient.h"
#include "mongo/db/namespace_string.h"
#include "mongo/db/s/implicit_create_collection.h"
#include "mongo/s/request_types/create_collection_gen.h"
#include "mongo/s/shard_server_test_fixture.h"
#include "mongo/unittest/unittest.h"
#include "mongo/util/assert_util.h"
namespace mongo {
namespace {
class ImplicitCreateTest : public ShardServerTestFixture {
public:
    // Schedules a mock reply for the next _configsvrCreateCollection request
    // and verifies the request is well-formed before answering it.
    void expectConfigCreate(const NamespaceString& expectedNss, const Status& response) {
        onCommand([&](const executor::RemoteCommandRequest& request) {
            // The request must have been routed to the config server host.
            const auto swConfigHost = configTargeterMock()->findHost(nullptr, {});
            ASSERT_OK(swConfigHost.getStatus());
            ASSERT_EQ(swConfigHost.getValue(), request.target);

            // The command must be _configsvrCreateCollection against the
            // admin database, naming the expected namespace.
            const auto firstElem = request.cmdObj.firstElement();
            ASSERT_EQ(ConfigsvrCreateCollection::kCommandName, firstElem.fieldName());
            ASSERT_EQ("admin", request.dbname);
            ASSERT_EQ(expectedNss.ns(), firstElem.String());

            // Answer with the caller-provided status.
            BSONObjBuilder replyBuilder;
            CommandHelpers::appendCommandStatusNoThrow(replyBuilder, response);
            return replyBuilder.obj();
        });
    }
};
// Happy path: when the config server acknowledges the implicit-create
// request, onCannotImplicitlyCreateCollection succeeds.
TEST_F(ImplicitCreateTest, NormalCreate) {
    const NamespaceString kNs("test.user");
    auto future = launchAsync([this, &kNs] {
        // Tear down the thread-local Client when the async task ends.
        ON_BLOCK_EXIT([&] { Client::destroy(); });
        Client::initThreadIfNotAlready("Test");
        auto opCtx = cc().makeOperationContext();
        ASSERT_OK(onCannotImplicitlyCreateCollection(opCtx.get(), kNs));
    });
    // Reply to the outgoing _configsvrCreateCollection request with success.
    expectConfigCreate(kNs, Status::OK());
    future.timed_get(kFutureTimeout);
}
// After a failed attempt, the handler must be callable again and succeed
// once the config server cooperates.
TEST_F(ImplicitCreateTest, CanCallOnCannotImplicitAgainAfterError) {
    const NamespaceString kNs("test.user");
    auto future = launchAsync([this, &kNs] {
        ON_BLOCK_EXIT([&] { Client::destroy(); });
        Client::initThreadIfNotAlready("Test");
        auto opCtx = cc().makeOperationContext();
        auto status = onCannotImplicitlyCreateCollection(opCtx.get(), kNs);
        ASSERT_EQ(ErrorCodes::FailPointEnabled, status);
    });
    // return a non retryable error (just for testing) so the handler won't retry.
    expectConfigCreate(kNs, {ErrorCodes::FailPointEnabled, "deliberate error"});
    future.timed_get(kFutureTimeout);
    // Retry, but this time config server will return success
    future = launchAsync([this, &kNs] {
        ON_BLOCK_EXIT([&] { Client::destroy(); });
        Client::initThreadIfNotAlready("Test");
        auto opCtx = cc().makeOperationContext();
        ASSERT_OK(onCannotImplicitlyCreateCollection(opCtx.get(), kNs));
    });
    expectConfigCreate(kNs, Status::OK());
    future.timed_get(kFutureTimeout);
}
// If the collection already exists locally, a retry must short-circuit and
// send no remote create request at all.
TEST_F(ImplicitCreateTest, ShouldNotCallConfigCreateIfCollectionExists) {
    const NamespaceString kNs("test.user");
    auto future = launchAsync([this, &kNs] {
        ON_BLOCK_EXIT([&] { Client::destroy(); });
        Client::initThreadIfNotAlready("Test");
        auto opCtx = cc().makeOperationContext();
        auto status = onCannotImplicitlyCreateCollection(opCtx.get(), kNs);
        ASSERT_EQ(ErrorCodes::FailPointEnabled, status);
    });
    // return a non retryable error (just for testing) so the handler won't retry.
    expectConfigCreate(kNs, {ErrorCodes::FailPointEnabled, "deliberate error"});
    future.timed_get(kFutureTimeout);
    // Simulate config server successfully creating the collection despite returning error.
    DBDirectClient client(operationContext());
    BSONObj result;
    ASSERT_TRUE(
        client.runCommand(kNs.db().toString(), BSON("create" << kNs.coll().toString()), result));
    // Retry, but this time config server will return success
    future = launchAsync([this, &kNs] {
        ON_BLOCK_EXIT([&] { Client::destroy(); });
        Client::initThreadIfNotAlready("Test");
        auto opCtx = cc().makeOperationContext();
        ASSERT_OK(onCannotImplicitlyCreateCollection(opCtx.get(), kNs));
    });
    // Not expecting this shard to send any remote command.
    future.timed_get(kFutureTimeout);
}
} // unnamed namespace
} // namespace mongo
|
winixi/wechat-pay | src/main/java/sh/evc/sdk/wechat/pay/response/mmpaymkttransfers/TransferQueryResponse.java | package sh.evc.sdk.wechat.pay.response.mmpaymkttransfers;
import com.fasterxml.jackson.annotation.JsonProperty;
import sh.evc.sdk.wechat.pay.dict.TransferStatus;
import sh.evc.sdk.wechat.pay.response.XmlResponse;
/**
 * Response for the WeChat Pay enterprise payment (transfer) query API.
 *
 * @author winixi
 * @date 2021/2/17 2:20 PM
 */
public class TransferQueryResponse extends XmlResponse {

  /**
   * AppID bound to the merchant account.
   */
  @JsonProperty("appid")
  private String appId;

  /**
   * Merchant ID assigned by WeChat Pay.
   */
  @JsonProperty("mchid")
  private String mchId;

  /**
   * Merchant order number used when the merchant called the transfer API.
   */
  private String partnerTradeNo;

  /**
   * Transfer number generated internally by WeChat when the transfer API was called.
   */
  private String detailId;

  /**
   * Transfer status.
   */
  private TransferStatus status;

  /**
   * Failure reason, present when the transfer failed.
   */
  private String reason;

  /**
   * OpenID of the payee.
   */
  @JsonProperty("openid")
  private String openId;

  /**
   * Name of the payee.
   */
  private String transferName;

  /**
   * Payment amount in fen (1/100 CNY).
   */
  private Integer paymentAmount;

  /**
   * Time the transfer was initiated.
   */
  private String transferTime;

  /**
   * Time the enterprise payment succeeded.
   */
  private String paymentTime;

  /**
   * Remark attached to the enterprise payment.
   */
  private String desc;

  public String getAppId() {
    return appId;
  }

  public void setAppId(String appId) {
    this.appId = appId;
  }

  public String getMchId() {
    return mchId;
  }

  public void setMchId(String mchId) {
    this.mchId = mchId;
  }

  public String getPartnerTradeNo() {
    return partnerTradeNo;
  }

  public void setPartnerTradeNo(String partnerTradeNo) {
    this.partnerTradeNo = partnerTradeNo;
  }

  public String getDetailId() {
    return detailId;
  }

  public void setDetailId(String detailId) {
    this.detailId = detailId;
  }

  public TransferStatus getStatus() {
    return status;
  }

  public void setStatus(TransferStatus status) {
    this.status = status;
  }

  public String getReason() {
    return reason;
  }

  public void setReason(String reason) {
    this.reason = reason;
  }

  public String getOpenId() {
    return openId;
  }

  public void setOpenId(String openId) {
    this.openId = openId;
  }

  public String getTransferName() {
    return transferName;
  }

  public void setTransferName(String transferName) {
    this.transferName = transferName;
  }

  public Integer getPaymentAmount() {
    return paymentAmount;
  }

  public void setPaymentAmount(Integer paymentAmount) {
    this.paymentAmount = paymentAmount;
  }

  public String getTransferTime() {
    return transferTime;
  }

  public void setTransferTime(String transferTime) {
    this.transferTime = transferTime;
  }

  public String getPaymentTime() {
    return paymentTime;
  }

  public void setPaymentTime(String paymentTime) {
    this.paymentTime = paymentTime;
  }

  public String getDesc() {
    return desc;
  }

  public void setDesc(String desc) {
    this.desc = desc;
  }

  @Override
  public String toString() {
    return "TransferQueryResponse{" +
        "appId='" + appId + '\'' +
        ", mchId='" + mchId + '\'' +
        ", partnerTradeNo='" + partnerTradeNo + '\'' +
        ", detailId='" + detailId + '\'' +
        ", status=" + status +
        ", reason='" + reason + '\'' +
        ", openId='" + openId + '\'' +
        ", transferName='" + transferName + '\'' +
        ", paymentAmount=" + paymentAmount +
        ", transferTime='" + transferTime + '\'' +
        ", paymentTime='" + paymentTime + '\'' +
        ", desc='" + desc + '\'' +
        "} " + super.toString();
  }
}
|
fossabot/pwa | libraries/common/selectors/router.js | <gh_stars>0
import { createSelector } from 'reselect';
/**
 * Selects the router slice of the application state.
 * @param {Object} state The application state.
 * @return {Object}
 */
export const getRouterState = (state) => {
  return state.router;
};
/**
 * Selects the router history stack, falling back to an empty array.
 * @param {Object} state The application state.
 * @return {Array}
 */
export const getRouterStack = createSelector(
  getRouterState,
  (routerState) => {
    if (routerState && routerState.stack) {
      return routerState.stack;
    }
    return [];
  }
);
/**
 * Selects the current route, or a specific stack entry when props.routeId
 * is supplied.
 * @param {Object} state The application state.
 * @returns {Object|null}
 */
export const getCurrentRoute = createSelector(
  getRouterState,
  getRouterStack,
  (state, props = {}) => props.routeId,
  (router, stack, routeId) => {
    const hasCurrent = !!(router && router.currentRoute);
    if (!hasCurrent) {
      return null;
    }
    return routeId
      ? stack.find(route => route.id === routeId)
      : router.currentRoute;
  }
);
/**
 * Selects the params of the current route, if any.
 * @param {Object} state The application state.
 * @returns {Object|null}
 */
export const getCurrentParams = createSelector(
  getCurrentRoute,
  route => ((route && route.params) ? route.params : null)
);
/**
 * Selects the pathname of the current route, if any.
 * @param {Object} state The application state.
 * @returns {string|null} The current history pathname.
 */
export const getCurrentPathname = createSelector(
  getCurrentRoute,
  route => ((route && route.pathname) ? route.pathname : null)
);
/**
 * Selects the query object of the current route, if any.
 * @param {Object} state The application state.
 * @returns {Object|null} The current history query.
 */
export const getCurrentQuery = createSelector(
  getCurrentRoute,
  route => ((route && route.query) ? route.query : null)
);
/**
 * Selects the search term (query parameter "s") of the current route.
 * @param {Object} state The application state.
 * @returns {string|null} The current history search query.
 */
export const getCurrentSearchQuery = createSelector(
  getCurrentQuery,
  query => ((query && query.s) ? query.s : null)
);
/**
 * Selects the history entry state of the current route, if any.
 * @param {Object} state The application state.
 * @returns {Object|null} The current history entry state.
 */
export const getCurrentState = createSelector(
  getCurrentRoute,
  route => ((route && route.state) ? route.state : null)
);
|
ZhnZhn/ZhnZhn.github.io | src/flux/stores/ChartSlice.js | <filename>src/flux/stores/ChartSlice.js
import {
LPAT_LOADING,
LPAT_LOADING_COMPLETE,
LPAT_LOADING_FAILED
} from '../actions/LoadingProgressActions'
import { ChartActionTypes as CHAT } from '../actions/ChartActions';
import { BAT_UPDATE_BROWSER_MENU } from '../actions/BrowserActions';
import ChartLogic from './chart/ChartLogic'
// Chart bookkeeping helpers provided by ChartLogic (./chart/ChartLogic).
const {
  isChartExist,
  loadConfig, showChart,
  removeConfig,
  toTop,
  updateMovingValues,
  sortBy,
  removeAll,
  checkBrowserChartTypes,
  scanPostAdded,
  setAlertItemIdTo
} = ChartLogic;
// CSS applied to error output written to the browser console.
const CONSOLE_LOG_STYLE = 'color:rgb(237, 88, 19);';

// Writes an alert to the console as a styled "caption: itemId" title line
// followed by the alert description.
const _logErrorToConsole = ({
  alertCaption,
  alertItemId,
  alertDescr,
}) => {
  const title = [alertCaption, alertItemId]
    .filter(Boolean)
    .join(": ");
  console.log('%c' + title, CONSOLE_LOG_STYLE);
  console.log('%c' + alertDescr, CONSOLE_LOG_STYLE);
};
// Reflux-style store slice that keeps per-chartType chart configurations and
// translates load/show/close actions into CHAT/BAT trigger events.
const ChartSlice = {
  // chartType -> chart configurations currently managed by the store.
  charts : {},
  getConfigs(chartType){
    return this.charts[chartType];
  },
  // Returns true when a chart with option.key already exists for
  // option.chartType; normalizes browser chart types first.
  isChartExist(option){
    checkBrowserChartTypes(this, option)
    const { chartType, key } = option;
    return isChartExist(this.charts, chartType, key);
  },
  onLoadStock(){
    this.triggerLoadingProgress(LPAT_LOADING)
  },
  // Registers a freshly loaded config, updates menu counters and notifies
  // listeners either with the updated chart slice or a new container Comp.
  onLoadStockCompleted(option, config){
    const {
      chartType, browserType,
      dialogConf,
      limitRemaining
    } = option;
    this.addMenuItemCounter(chartType, browserType);
    // Fall back to the stored dialog config when none was supplied.
    const _dialogConf = dialogConf
      || this.getDialogConf(void 0, chartType);
    const {
      chartSlice, Comp
    } = loadConfig(this.charts, config, option, _dialogConf, this);
    if (chartSlice){
      this.trigger(CHAT.LOAD_STOCK_COMPLETED, chartSlice);
    } else {
      this.trigger(CHAT.INIT_AND_SHOW_CHART, Comp);
    }
    this.triggerLoadingProgress(LPAT_LOADING_COMPLETE)
    this.triggerLimitRemaining(limitRemaining);
    this.trigger(BAT_UPDATE_BROWSER_MENU, browserType);
  },
  onLoadStockAdded(option={}){
    this.triggerLoadingProgress(LPAT_LOADING_COMPLETE)
    scanPostAdded(this, option)
  },
  // Reports a failed load: progress bar, alert dialog and console log.
  onLoadStockFailed(option){
    this.triggerLoadingProgress(LPAT_LOADING_FAILED)
    setAlertItemIdTo(option)
    this.showAlertDialog(option);
    _logErrorToConsole(option);
  },
  // Query-based loads reuse the plain load handlers.
  onLoadStockByQuery(){
    this.onLoadStock()
  },
  onLoadStockByQueryCompleted(option, config){
    this.onLoadStockCompleted(option, config)
  },
  onLoadStockByQueryFailed(option){
    this.onLoadStockFailed(option)
  },
  // Opens the chart container for chartType, creating it when needed.
  onShowChart(chartType, browserType, dialogConfOr){
    this.setMenuItemOpen(chartType, browserType);
    const dialogConf = this.getDialogConf(dialogConfOr, chartType);
    const {
      chartSlice, Comp
    } = showChart(this.charts, chartType, browserType, dialogConf, this);
    if (chartSlice){
      this.trigger(CHAT.SHOW_CHART, chartSlice);
    } else {
      this.trigger(CHAT.INIT_AND_SHOW_CHART, Comp)
    }
    this.trigger(BAT_UPDATE_BROWSER_MENU, browserType);
  },
  // Drops the activeChart reference when the chart with this id goes away.
  resetActiveChart(id){
    if (
      this.activeChart &&
      this.activeChart.options.zhConfig.id === id
    ){
      this.activeChart = null;
    }
  },
  onCloseChart(chartType, browserType, chartId){
    const {
      chartSlice, isRemoved
    } = removeConfig(this.charts, chartType, chartId)
    if (isRemoved) {
      this.resetActiveChart(chartId)
      this.minusMenuItemCounter(chartType, browserType);
      this.trigger(CHAT.CLOSE_CHART, chartSlice);
      this.trigger(BAT_UPDATE_BROWSER_MENU, browserType);
    }
  },
  onToTop(chartType, id){
    const chartSlice = toTop(this.charts, chartType, id)
    this.trigger(CHAT.SHOW_CHART, chartSlice);
  },
  // Remembers the chart a copy was started from (see getCopyFromChart).
  onCopy(chart){
    this.fromChart = chart
  },
  getCopyFromChart(){
    return this.fromChart;
  },
  onUpdateMovingValues(chartType, movingValues){
    updateMovingValues(this.charts, chartType, movingValues)
  },
  onSortBy(chartType, by){
    const chartSlice = sortBy(this.charts, chartType, by);
    this.trigger(CHAT.SHOW_CHART, chartSlice);
  },
  // Removes every chart of chartType and resets the related menu state.
  onRemoveAll(chartType, browserType){
    const chartSlice = removeAll(this.charts, chartType);
    this.resetMenuItemCounter(chartType, browserType)
    this.uncheckActiveCheckbox()
    this.trigger(CHAT.SHOW_CHART, chartSlice);
    this.trigger(BAT_UPDATE_BROWSER_MENU, browserType);
  }
};

export default ChartSlice
|
w2ogroup/rave-old | rave-components/rave-core/src/test/java/org/apache/rave/portal/service/impl/DefaultWidgetMarketplaceServiceTest.java | <reponame>w2ogroup/rave-old
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rave.portal.service.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.rave.portal.model.Widget;
import org.apache.rave.portal.model.util.SearchResult;
import org.apache.rave.portal.service.PortalPreferenceService;
import org.apache.rave.portal.service.RemoteWidgetResolverService;
import org.apache.rave.portal.service.WidgetMarketplaceService;
import org.apache.rave.portal.service.WidgetMetadataResolver;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.easymock.EasyMock.*;
public class DefaultWidgetMarketplaceServiceTest {
static RemoteWidgetResolverService widgetResolverService;
static PortalPreferenceService prefs;
static WidgetMarketplaceService service;
static Map<String, WidgetMetadataResolver> widgetMetadataResolverMap;
static List<WidgetMetadataResolver> metadataResolver;
@BeforeClass
public static void setup(){
prefs = createMock(PortalPreferenceService.class);
metadataResolver = new ArrayList<WidgetMetadataResolver>();
service = new DefaultWidgetMarketplaceService(widgetResolverService, prefs);
}
@Ignore
@Test
public void getSearchResults() throws Exception{
SearchResult<Widget> results = service.getWidgetsByFreeTextSearch("wookie", 0, 10);
for (Widget widget: results.getResultSet()){
System.out.println(widget.getTitle());
}
System.out.println(results.getTotalResults() + " total results");
}
@Ignore
@Test
public void getCategoryResults() throws Exception{
prefs = createMock(PortalPreferenceService.class);
service = new DefaultWidgetMarketplaceService(widgetResolverService, prefs);
SearchResult<Widget> results = service.getWidgetsByCategory("a", 0, 10);
for (Widget widget: results.getResultSet()){
System.out.println(widget.getTitle());
}
System.out.println(results.getTotalResults() + " total results");
}
@Ignore
@Test
public void getCategories() throws Exception{
prefs = createMock(PortalPreferenceService.class);
service = new DefaultWidgetMarketplaceService(widgetResolverService, prefs);
List<String> results = service.getCategories();
for (String category: results){
System.out.println(category);
}
}
} |
colleenmcginnis/apm-server | model/modeldecoder/v2/metadata_test.go | // Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package v2
import (
"net"
"reflect"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/elastic/apm-server/decoder"
"github.com/elastic/apm-server/model"
"github.com/elastic/apm-server/model/modeldecoder/modeldecodertest"
)
// isMetadataException reports whether the given model field key is exempt
// from metadata round-trip assertions: either it has no metadata-input
// equivalent, belongs to an event type, or carries an ignored prefix.
func isMetadataException(key string) bool {
	if isUnmappedMetadataField(key) {
		return true
	}
	return isEventField(key) || isIgnoredPrefix(key)
}
// isIgnoredPrefix reports whether key starts with one of the label-related
// prefixes that the metadata tests deliberately skip.
func isIgnoredPrefix(key string) bool {
	for _, prefix := range []string{"Labels", "NumericLabels"} {
		if strings.HasPrefix(key, prefix) {
			return true
		}
	}
	return false
}
// unmappedMetadataFields holds the list of model fields that have no equivalent
// in the metadata input type.
// isUnmappedMetadataField reports whether key names such a field, so the
// metadata round-trip tests can skip asserting on it.
func isUnmappedMetadataField(key string) bool {
	switch key {
	case
		"Child",
		"Child.ID",
		"Client.Domain",
		"Client.IP",
		"Client.Port",
		"Cloud.Origin",
		"Container.Runtime",
		"Container.ImageName",
		"Container.ImageTag",
		"Container.Name",
		"DataStream",
		"DataStream.Type",
		"DataStream.Dataset",
		"DataStream.Namespace",
		"Destination",
		"Destination.Address",
		"Destination.IP",
		"Destination.Port",
		"ECSVersion",
		"FAAS",
		"FAAS.ID",
		"FAAS.Coldstart",
		"FAAS.Execution",
		"FAAS.TriggerType",
		"FAAS.TriggerRequestID",
		"FAAS.Name",
		"FAAS.Version",
		"HTTP",
		"HTTP.Request",
		"HTTP.Response",
		"HTTP.Version",
		"Message",
		"Network",
		"Network.Connection",
		"Network.Connection.Subtype",
		"Network.Carrier",
		"Network.Carrier.Name",
		"Network.Carrier.MCC",
		"Network.Carrier.MNC",
		"Network.Carrier.ICC",
		"Observer",
		"Observer.EphemeralID",
		"Observer.Hostname",
		"Observer.ID",
		"Observer.Name",
		"Observer.Type",
		"Observer.Version",
		"Observer.VersionMajor",
		"Parent",
		"Parent.ID",
		"Process.CommandLine",
		"Process.Executable",
		"Processor",
		"Processor.Event",
		"Processor.Name",
		"Host.OS.Full",
		"Host.OS.Type",
		"Host.ID",
		"Host.IP",
		"Host.Type",
		"UserAgent",
		"UserAgent.Name",
		"UserAgent.Original",
		"Event",
		"Event.Duration",
		"Event.Outcome",
		"Event.Severity",
		"Event.Action",
		"Log",
		"Log.Level",
		"Service.Origin",
		"Service.Origin.ID",
		"Service.Origin.Name",
		"Service.Origin.Version",
		"Service.Target",
		"Service.Target.Name",
		"Service.Target.Type",
		"Session.ID",
		"Session",
		"Session.Sequence",
		"Source.Domain",
		"Source.IP",
		"Source.Port",
		"Source.NAT",
		"Trace",
		"Trace.ID",
		"URL",
		"URL.Original",
		"URL.Scheme",
		"URL.Full",
		"URL.Domain",
		"URL.Port",
		"URL.Path",
		"URL.Query",
		"URL.Fragment":
		return true
	}
	return false
}
// isEventField reports whether key is an event-type field — either the event
// type name itself (e.g. "Span") or one of its nested fields ("Span.ID").
func isEventField(key string) bool {
	for _, eventType := range [...]string{"Error", "Metricset", "ProfileSample", "Span", "Transaction"} {
		if key == eventType {
			return true
		}
		if strings.HasPrefix(key, eventType+".") {
			return true
		}
	}
	return false
}
// initializedInputMetadata returns a decoder metadata input populated with the
// given values together with the model.APMEvent produced by mapping that
// input. Model fields the decoder does not map are then set directly on the
// event, so callers can assert against a fully populated struct.
func initializedInputMetadata(values *modeldecodertest.Values) (metadata, model.APMEvent) {
	var input metadata
	var out model.APMEvent
	modeldecodertest.SetStructValues(&input, values)
	mapToMetadataModel(&input, &out)
	// Fill in the fields mapToMetadataModel intentionally leaves untouched.
	modeldecodertest.SetStructValues(&out, values, func(key string, field, value reflect.Value) bool {
		return isUnmappedMetadataField(key) || isEventField(key)
	})
	return input, out
}
// TestResetMetadataOnRelease ensures that returning a metadata root to the
// object pool resets its state, so pooled instances cannot leak data between
// requests.
func TestResetMetadataOnRelease(t *testing.T) {
	inp := `{"metadata":{"service":{"name":"service-a"}}}`
	m := fetchMetadataRoot()
	require.NoError(t, decoder.NewJSONDecoder(strings.NewReader(inp)).Decode(m))
	require.True(t, m.IsSet())
	releaseMetadataRoot(m)
	assert.False(t, m.IsSet())
}
// TestDecodeMetadata verifies that both the flat (DecodeMetadata) and nested
// (DecodeNestedMetadata) entry points decode labels and service information,
// split labels into string vs numeric maps, and surface decode/validation
// errors for malformed or empty input.
func TestDecodeMetadata(t *testing.T) {
	for _, tc := range []struct {
		name     string
		input    string
		decodeFn func(decoder.Decoder, *model.APMEvent) error
	}{
		{name: "decodeMetadata", decodeFn: DecodeMetadata,
			input: `{"labels":{"a":"b","c":true,"d":1234,"e":1234.11},"service":{"name":"user-service","agent":{"name":"go","version":"1.0.0"}}}`},
		{name: "decodeNestedMetadata", decodeFn: DecodeNestedMetadata,
			input: `{"metadata":{"labels":{"a":"b","c":true,"d":1234,"e":1234.11},"service":{"name":"user-service","agent":{"name":"go","version":"1.0.0"}}}}`},
	} {
		t.Run("decode", func(t *testing.T) {
			var out model.APMEvent
			dec := decoder.NewJSONDecoder(strings.NewReader(tc.input))
			require.NoError(t, tc.decodeFn(dec, &out))
			// Boolean/numeric labels are routed to the appropriate label map.
			assert.Equal(t, model.APMEvent{
				Service: model.Service{Name: "user-service"},
				Agent:   model.Agent{Name: "go", Version: "1.0.0"},
				Labels: model.Labels{
					"a": {Value: "b"},
					"c": {Value: "true"},
				},
				NumericLabels: model.NumericLabels{
					"d": {Value: float64(1234)},
					"e": {Value: float64(1234.11)},
				},
			}, out)
			// Malformed JSON must surface as a decode error.
			err := tc.decodeFn(decoder.NewJSONDecoder(strings.NewReader(`malformed`)), &out)
			require.Error(t, err)
			assert.Contains(t, err.Error(), "decode")
		})
		t.Run("validate", func(t *testing.T) {
			// An empty document is syntactically valid but fails validation.
			inp := `{}`
			var out model.APMEvent
			err := tc.decodeFn(decoder.NewJSONDecoder(strings.NewReader(inp)), &out)
			require.Error(t, err)
			assert.Contains(t, err.Error(), "validation")
		})
	}
}
// TestDecodeMapToMetadataModel exercises mapToMetadataModel directly: that
// repeated mappings overwrite previous values but empty input leaves the
// model unchanged, that pooled/reused input does not leak into previously
// produced events, and that hostname precedence (configured > detected >
// deprecated) is honored.
func TestDecodeMapToMetadataModel(t *testing.T) {
	t.Run("overwrite", func(t *testing.T) {
		// setup:
		// create initialized modeldecoder and empty model metadata
		// map modeldecoder to model metadata and manually set
		// enhanced data that are never set by the modeldecoder
		defaultVal := modeldecodertest.DefaultValues()
		input, out := initializedInputMetadata(defaultVal)
		out.Timestamp = defaultVal.Time
		// iterate through model and assert values are set
		modeldecodertest.AssertStructValues(t, &out, isMetadataException, defaultVal)
		// overwrite model metadata with specified Values
		// then iterate through model and assert values are overwritten
		otherVal := modeldecodertest.NonDefaultValues()
		// System.IP and Client.IP are not set by decoder,
		// therefore their values are not updated
		otherVal.Update(defaultVal.IP)
		input.Reset()
		modeldecodertest.SetStructValues(&input, otherVal)
		out.Timestamp = otherVal.Time
		mapToMetadataModel(&input, &out)
		modeldecodertest.AssertStructValues(t, &out, isMetadataException, otherVal)
		// map an empty modeldecoder metadata to the model
		// and assert values are unchanged
		input.Reset()
		modeldecodertest.SetZeroStructValues(&input)
		mapToMetadataModel(&input, &out)
		modeldecodertest.AssertStructValues(t, &out, isMetadataException, otherVal)
	})
	t.Run("reused-memory", func(t *testing.T) {
		var out2 model.APMEvent
		defaultVal := modeldecodertest.DefaultValues()
		input, out1 := initializedInputMetadata(defaultVal)
		out1.Timestamp = defaultVal.Time
		// iterate through model and assert values are set
		modeldecodertest.AssertStructValues(t, &out1, isMetadataException, defaultVal)
		// overwrite model metadata with specified Values
		// then iterate through model and assert values are overwritten
		otherVal := modeldecodertest.NonDefaultValues()
		// System.IP and Client.IP are not set by decoder,
		// therefore their values are not updated
		otherVal.Update(defaultVal.IP)
		input.Reset()
		modeldecodertest.SetStructValues(&input, otherVal)
		mapToMetadataModel(&input, &out2)
		out2.Timestamp = otherVal.Time
		out2.Host.IP = []net.IP{defaultVal.IP}
		out2.Client.IP = defaultVal.IP
		out2.Source.IP = defaultVal.IP
		// out1 must be unaffected by reusing the same input struct for out2.
		modeldecodertest.AssertStructValues(t, &out2, isMetadataException, otherVal)
		modeldecodertest.AssertStructValues(t, &out1, isMetadataException, defaultVal)
	})
	t.Run("system", func(t *testing.T) {
		var input metadata
		var out model.APMEvent
		// full input information
		modeldecodertest.SetStructValues(&input, modeldecodertest.DefaultValues())
		input.System.ConfiguredHostname.Set("configured-host")
		input.System.DetectedHostname.Set("detected-host")
		input.System.DeprecatedHostname.Set("deprecated-host")
		mapToMetadataModel(&input, &out)
		assert.Equal(t, "configured-host", out.Host.Name)
		assert.Equal(t, "detected-host", out.Host.Hostname)
		// no detected-host information
		out = model.APMEvent{}
		input.System.DetectedHostname.Reset()
		mapToMetadataModel(&input, &out)
		assert.Equal(t, "configured-host", out.Host.Name)
		assert.Empty(t, out.Host.Hostname)
		// no configured-host information
		out = model.APMEvent{}
		input.System.ConfiguredHostname.Reset()
		mapToMetadataModel(&input, &out)
		assert.Empty(t, out.Host.Name)
		assert.Equal(t, "deprecated-host", out.Host.Hostname)
		// no host information given
		// NOTE(review): mapToMetadataModel is not invoked here, so these
		// asserts trivially check a fresh empty event — possibly a missing
		// call; confirm intent.
		out = model.APMEvent{}
		input.System.DeprecatedHostname.Reset()
		assert.Empty(t, out.Host.Name)
		assert.Empty(t, out.Host.Hostname)
	})
}
|
VisualAcademy/Java | Java/Whitespace/Whitespace.java | <gh_stars>0
// Whitespace: in programming languages, extra whitespace between tokens is ignored.
public class Whitespace
{
    // FIX: parameter name typo "arsg" -> "args" (conventional name; the JVM
    // entry-point signature is unchanged).
    public static void main(String[] args)
    {
        // All three statements are equivalent; Java ignores the extra
        // whitespace between tokens, so each prints "Java". The unusual
        // formatting below is kept on purpose — it is the demonstration.
        System.out.println("Java");
        System.out.println( "Java" ) ;
        System
        .out
        .println(
        "Java"
        )
        ;
    }
}
|
KruthikaShyamSundar/wildlife_park | node_modules/svgo/plugins/removeUselessStrokeAndFill.js | <filename>node_modules/svgo/plugins/removeUselessStrokeAndFill.js
'use strict';
const { visit, visitSkip, detachNodeFromParent } = require('../lib/xast.js');
const { collectStylesheet, computeStyle } = require('../lib/style.js');
const { elemsGroups } = require('./_collections.js');
// Plugin registration metadata consumed by the svgo plugin loader.
exports.type = 'visitor';
exports.name = 'removeUselessStrokeAndFill';
exports.active = true;
exports.description = 'removes useless stroke and fill attributes';
/**
* Remove useless stroke and fill attrs.
*
* @author <NAME>
*
* @type {import('../lib/types').Plugin<{
* stroke?: boolean,
* fill?: boolean,
* removeNone?: boolean
* }>}
*/
exports.fn = (root, params) => {
const {
stroke: removeStroke = true,
fill: removeFill = true,
removeNone = false,
} = params;
// style and script elements deoptimise this plugin
let hasStyleOrScript = false;
visit(root, {
element: {
enter: (node) => {
if (node.name === 'style' || node.name === 'script') {
hasStyleOrScript = true;
}
},
},
});
if (hasStyleOrScript) {
return null;
}
const stylesheet = collectStylesheet(root);
return {
element: {
enter: (node, parentNode) => {
// id attribute deoptimise the whole subtree
if (node.attributes.id != null) {
return visitSkip;
}
if (elemsGroups.shape.includes(node.name) == false) {
return;
}
const computedStyle = computeStyle(stylesheet, node);
const stroke = computedStyle.stroke;
const strokeOpacity = computedStyle['stroke-opacity'];
const strokeWidth = computedStyle['stroke-width'];
const markerEnd = computedStyle['marker-end'];
const fill = computedStyle.fill;
const fillOpacity = computedStyle['fill-opacity'];
const computedParentStyle =
parentNode.type === 'element'
? computeStyle(stylesheet, parentNode)
: null;
const parentStroke =
computedParentStyle == null ? null : computedParentStyle.stroke;
// remove stroke*
if (removeStroke) {
if (
stroke == null ||
(stroke.type === 'static' && stroke.value == 'none') ||
(strokeOpacity != null &&
strokeOpacity.type === 'static' &&
strokeOpacity.value === '0') ||
(strokeWidth != null &&
strokeWidth.type === 'static' &&
strokeWidth.value === '0')
) {
// stroke-width may affect the size of marker-end
// marker is not visible when stroke-width is 0
if (
(strokeWidth != null &&
strokeWidth.type === 'static' &&
strokeWidth.value === '0') ||
markerEnd == null
) {
for (const name of Object.keys(node.attributes)) {
if (name.startsWith('stroke')) {
delete node.attributes[name];
}
}
// set explicit none to not inherit from parent
if (
parentStroke != null &&
parentStroke.type === 'static' &&
parentStroke.value !== 'none'
) {
node.attributes.stroke = 'none';
}
}
}
}
// remove fill*
if (removeFill) {
if (
(fill != null && fill.type === 'static' && fill.value === 'none') ||
(fillOpacity != null &&
fillOpacity.type === 'static' &&
fillOpacity.value === '0')
) {
for (const name of Object.keys(node.attributes)) {
if (name.startsWith('fill-')) {
delete node.attributes[name];
}
}
if (
fill == null ||
(fill.type === 'static' && fill.value !== 'none')
) {
node.attributes.fill = 'none';
}
}
}
if (removeNone) {
if (
(stroke == null || node.attributes.stroke === 'none') &&
((fill != null &&
fill.type === 'static' &&
fill.value === 'none') ||
node.attributes.fill === 'none')
) {
detachNodeFromParent(node, parentNode);
}
}
},
},
};
};
|
xuepingiw/open_source_startalk | startalk_ui/GeneratedFiles/moc/moc_WebPage.cpp | <filename>startalk_ui/GeneratedFiles/moc/moc_WebPage.cpp
/****************************************************************************
** Meta object code from reading C++ file 'WebPage.h'
**
** Created by: The Qt Meta Object Compiler version 67 (Qt 5.2.1)
**
** WARNING! All changes made in this file will be lost!
*****************************************************************************/
#include "../../WebPage.h"
#include <QtCore/qbytearray.h>
#include <QtCore/qmetatype.h>
#if !defined(Q_MOC_OUTPUT_REVISION)
#error "The header file 'WebPage.h' doesn't include <QObject>."
#elif Q_MOC_OUTPUT_REVISION != 67
#error "This file was generated using the moc from 5.2.1. It"
#error "cannot be used with the include files from this version of Qt."
#error "(The moc has changed too much.)"
#endif
QT_BEGIN_MOC_NAMESPACE
// moc-generated string table and meta-object data for WebPage.
// NOTE: produced by Qt's moc — hand edits are lost on regeneration.
struct qt_meta_stringdata_WebPage_t {
    QByteArrayData data[9];
    char stringdata[81];
};
// Builds a QByteArrayData entry pointing into the shared stringdata blob.
#define QT_MOC_LITERAL(idx, ofs, len) \
    Q_STATIC_BYTE_ARRAY_DATA_HEADER_INITIALIZER_WITH_OFFSET(len, \
    offsetof(qt_meta_stringdata_WebPage_t, stringdata) + ofs \
        - idx * sizeof(QByteArrayData) \
    )
static const qt_meta_stringdata_WebPage_t qt_meta_stringdata_WebPage = {
    {
QT_MOC_LITERAL(0, 0, 7),
QT_MOC_LITERAL(1, 8, 8),
QT_MOC_LITERAL(2, 17, 0),
QT_MOC_LITERAL(3, 18, 3),
QT_MOC_LITERAL(4, 22, 13),
QT_MOC_LITERAL(5, 36, 12),
QT_MOC_LITERAL(6, 49, 13),
QT_MOC_LITERAL(7, 63, 10),
QT_MOC_LITERAL(8, 74, 5)
    },
    "WebPage\0openLink\0\0url\0ButtonClicked\0"
    "onPreLoadUrl\0onFrameCreate\0QWebFrame*\0"
    "frame\0"
};
#undef QT_MOC_LITERAL
// Meta-method descriptor table: 2 signals (openLink, ButtonClicked) and
// 2 slots (onPreLoadUrl, onFrameCreate).
static const uint qt_meta_data_WebPage[] = {
 // content:
       7,       // revision
       0,       // classname
       0,    0, // classinfo
       4,   14, // methods
       0,    0, // properties
       0,    0, // enums/sets
       0,    0, // constructors
       0,       // flags
       2,       // signalCount
 // signals: name, argc, parameters, tag, flags
       1,    1,   34,    2, 0x06,
       4,    1,   37,    2, 0x06,
 // slots: name, argc, parameters, tag, flags
       5,    0,   40,    2, 0x08,
       6,    1,   41,    2, 0x08,
 // signals: parameters
    QMetaType::Void, QMetaType::QUrl,    3,
    QMetaType::Void, QMetaType::QUrl,    3,
 // slots: parameters
    QMetaType::Void,
    QMetaType::Void, 0x80000000 | 7,    8,
       0        // eod
};
// moc-generated dispatcher: invokes the signal/slot whose local meta-method
// index is _id, unpacking arguments from the _a array.
void WebPage::qt_static_metacall(QObject *_o, QMetaObject::Call _c, int _id, void **_a)
{
    if (_c == QMetaObject::InvokeMetaMethod) {
        WebPage *_t = static_cast<WebPage *>(_o);
        switch (_id) {
        case 0: _t->openLink((*reinterpret_cast< const QUrl(*)>(_a[1]))); break;
        case 1: _t->ButtonClicked((*reinterpret_cast< const QUrl(*)>(_a[1]))); break;
        case 2: _t->onPreLoadUrl(); break;
        case 3: _t->onFrameCreate((*reinterpret_cast< QWebFrame*(*)>(_a[1]))); break;
        default: ;
        }
    } else if (_c == QMetaObject::IndexOfMethod) {
        // Maps a member-function pointer back to its signal index.
        int *result = reinterpret_cast<int *>(_a[0]);
        void **func = reinterpret_cast<void **>(_a[1]);
        {
            typedef void (WebPage::*_t)(const QUrl & );
            if (*reinterpret_cast<_t *>(func) == static_cast<_t>(&WebPage::openLink)) {
                *result = 0;
            }
        }
        {
            typedef void (WebPage::*_t)(const QUrl & );
            if (*reinterpret_cast<_t *>(func) == static_cast<_t>(&WebPage::ButtonClicked)) {
                *result = 1;
            }
        }
    }
}
// Static meta-object linking WebPage into QWebPage's meta hierarchy.
const QMetaObject WebPage::staticMetaObject = {
    { &QWebPage::staticMetaObject, qt_meta_stringdata_WebPage.data,
      qt_meta_data_WebPage, qt_static_metacall, 0, 0}
};
const QMetaObject *WebPage::metaObject() const
{
    return QObject::d_ptr->metaObject ? QObject::d_ptr->dynamicMetaObject() : &staticMetaObject;
}
// moc-generated runtime cast by class-name string.
void *WebPage::qt_metacast(const char *_clname)
{
    if (!_clname) return 0;
    if (!strcmp(_clname, qt_meta_stringdata_WebPage.stringdata))
        return static_cast<void*>(const_cast< WebPage*>(this));
    return QWebPage::qt_metacast(_clname);
}
// moc-generated metacall: the base class consumes its own method indices
// first; any remaining index (< 4) addresses one of WebPage's meta-methods.
int WebPage::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    _id = QWebPage::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod) {
        if (_id < 4)
            qt_static_metacall(this, _c, _id, _a);
        _id -= 4;
    } else if (_c == QMetaObject::RegisterMethodArgumentMetaType) {
        if (_id < 4)
            *reinterpret_cast<int*>(_a[0]) = -1;
        _id -= 4;
    }
    return _id;
}
// SIGNAL 0
// moc-generated emitter: packs the argument pointer into _a and notifies
// connected slots through QMetaObject::activate (signal index 0).
void WebPage::openLink(const QUrl & _t1)
{
    void *_a[] = { 0, const_cast<void*>(reinterpret_cast<const void*>(&_t1)) };
    QMetaObject::activate(this, &staticMetaObject, 0, _a);
}
// SIGNAL 1
void WebPage::ButtonClicked(const QUrl & _t1)
{
    void *_a[] = { 0, const_cast<void*>(reinterpret_cast<const void*>(&_t1)) };
    QMetaObject::activate(this, &staticMetaObject, 1, _a);
}
QT_END_MOC_NAMESPACE
|
aklinker1/miasma | internal/server/gen/restapi/operations/update_app_traefik_config_responses.go | <reponame>aklinker1/miasma
// Code generated by go-swagger; DO NOT EDIT.
package operations
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"net/http"
"github.com/go-openapi/runtime"
"github.com/aklinker1/miasma/package/models"
)
// UpdateAppTraefikConfigOKCode is the HTTP code returned for type UpdateAppTraefikConfigOK
const UpdateAppTraefikConfigOKCode int = 200

/*UpdateAppTraefikConfigOK Created

swagger:response updateAppTraefikConfigOK
*/
type UpdateAppTraefikConfigOK struct {

	/*
	  In: Body
	*/
	Payload *models.TraefikPluginConfig `json:"body,omitempty"`
}

// NewUpdateAppTraefikConfigOK creates UpdateAppTraefikConfigOK with default headers values
func NewUpdateAppTraefikConfigOK() *UpdateAppTraefikConfigOK {
	return &UpdateAppTraefikConfigOK{}
}

// WithPayload adds the payload to the update app traefik config o k response.
// Fluent builder style: returns the receiver so calls can be chained.
func (o *UpdateAppTraefikConfigOK) WithPayload(payload *models.TraefikPluginConfig) *UpdateAppTraefikConfigOK {
	o.Payload = payload
	return o
}

// SetPayload sets the payload to the update app traefik config o k response
func (o *UpdateAppTraefikConfigOK) SetPayload(payload *models.TraefikPluginConfig) {
	o.Payload = payload
}

// WriteResponse to the client.
// A nil Payload writes the 200 status line with an empty body.
func (o *UpdateAppTraefikConfigOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
	rw.WriteHeader(200)
	if o.Payload != nil {
		payload := o.Payload
		if err := producer.Produce(rw, payload); err != nil {
			panic(err) // let the recovery middleware deal with this
		}
	}
}
// UpdateAppTraefikConfigBadRequestCode is the HTTP code returned for type UpdateAppTraefikConfigBadRequest
const UpdateAppTraefikConfigBadRequestCode int = 400

/*UpdateAppTraefikConfigBadRequest Traefik plugin is not installed

swagger:response updateAppTraefikConfigBadRequest
*/
type UpdateAppTraefikConfigBadRequest struct {

	/*
	  In: Body
	*/
	Payload string `json:"body,omitempty"`
}

// NewUpdateAppTraefikConfigBadRequest creates UpdateAppTraefikConfigBadRequest with default headers values
func NewUpdateAppTraefikConfigBadRequest() *UpdateAppTraefikConfigBadRequest {
	return &UpdateAppTraefikConfigBadRequest{}
}

// WithPayload adds the payload to the update app traefik config bad request response.
// Fluent builder style: returns the receiver so calls can be chained.
func (o *UpdateAppTraefikConfigBadRequest) WithPayload(payload string) *UpdateAppTraefikConfigBadRequest {
	o.Payload = payload
	return o
}

// SetPayload sets the payload to the update app traefik config bad request response
func (o *UpdateAppTraefikConfigBadRequest) SetPayload(payload string) {
	o.Payload = payload
}

// WriteResponse to the client.
// Unlike the 200 response, the (string) payload is always produced,
// even when it is empty.
func (o *UpdateAppTraefikConfigBadRequest) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
	rw.WriteHeader(400)
	payload := o.Payload
	if err := producer.Produce(rw, payload); err != nil {
		panic(err) // let the recovery middleware deal with this
	}
}
/*UpdateAppTraefikConfigDefault Unknown Error

swagger:response updateAppTraefikConfigDefault
*/
type UpdateAppTraefikConfigDefault struct {
	// _statusCode is unexported; callers go through the constructor or
	// WithStatusCode/SetStatusCode to set it.
	_statusCode int

	/*
	  In: Body
	*/
	Payload string `json:"body,omitempty"`
}

// NewUpdateAppTraefikConfigDefault creates UpdateAppTraefikConfigDefault with default headers values.
// Non-positive codes are coerced to 500.
func NewUpdateAppTraefikConfigDefault(code int) *UpdateAppTraefikConfigDefault {
	if code <= 0 {
		code = 500
	}
	return &UpdateAppTraefikConfigDefault{
		_statusCode: code,
	}
}

// WithStatusCode adds the status to the update app traefik config default response.
// Note: unlike the constructor, this does not coerce non-positive codes.
func (o *UpdateAppTraefikConfigDefault) WithStatusCode(code int) *UpdateAppTraefikConfigDefault {
	o._statusCode = code
	return o
}

// SetStatusCode sets the status to the update app traefik config default response
func (o *UpdateAppTraefikConfigDefault) SetStatusCode(code int) {
	o._statusCode = code
}

// WithPayload adds the payload to the update app traefik config default response
func (o *UpdateAppTraefikConfigDefault) WithPayload(payload string) *UpdateAppTraefikConfigDefault {
	o.Payload = payload
	return o
}

// SetPayload sets the payload to the update app traefik config default response
func (o *UpdateAppTraefikConfigDefault) SetPayload(payload string) {
	o.Payload = payload
}

// WriteResponse to the client
func (o *UpdateAppTraefikConfigDefault) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
	rw.WriteHeader(o._statusCode)
	payload := o.Payload
	if err := producer.Produce(rw, payload); err != nil {
		panic(err) // let the recovery middleware deal with this
	}
}
|
alanjjenkins/awacs | awacs/glue.py | # Copyright (c) 2012-2021, <NAME> <<EMAIL>>
# All rights reserved.
#
# See LICENSE file for full license.
from typing import Optional

from .aws import Action as BaseAction
from .aws import BaseARN
# IAM service display name and action prefix for AWS Glue.
service_name = "AWS Glue"
prefix = "glue"


class Action(BaseAction):
    """An IAM policy action in the ``glue`` namespace (e.g. ``glue:GetTable``)."""

    def __init__(self, action: Optional[str] = None) -> None:
        # `action` defaults to None, so it must be annotated Optional[str]
        # rather than a bare `str` (implicit Optional is invalid per PEP 484).
        super().__init__(prefix, action)
class ARN(BaseARN):
    """An ARN scoped to the ``glue`` service.

    Partition/formatting details are inherited from BaseARN; this class
    only pins ``service`` to the glue prefix.
    """

    def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
        super().__init__(
            service=prefix, resource=resource, region=region, account=account
        )
BatchCreatePartition = Action("BatchCreatePartition")
BatchDeleteConnection = Action("BatchDeleteConnection")
BatchDeletePartition = Action("BatchDeletePartition")
BatchDeleteTable = Action("BatchDeleteTable")
BatchDeleteTableVersion = Action("BatchDeleteTableVersion")
BatchGetCrawlers = Action("BatchGetCrawlers")
BatchGetDevEndpoints = Action("BatchGetDevEndpoints")
BatchGetJobs = Action("BatchGetJobs")
BatchGetPartition = Action("BatchGetPartition")
BatchGetTriggers = Action("BatchGetTriggers")
BatchGetWorkflows = Action("BatchGetWorkflows")
BatchStopJobRun = Action("BatchStopJobRun")
CancelMLTaskRun = Action("CancelMLTaskRun")
CheckSchemaVersionValidity = Action("CheckSchemaVersionValidity")
CreateClassifier = Action("CreateClassifier")
CreateConnection = Action("CreateConnection")
CreateCrawler = Action("CreateCrawler")
CreateDatabase = Action("CreateDatabase")
CreateDevEndpoint = Action("CreateDevEndpoint")
CreateJob = Action("CreateJob")
CreateMLTransform = Action("CreateMLTransform")
CreatePartition = Action("CreatePartition")
CreateRegistry = Action("CreateRegistry")
CreateSchema = Action("CreateSchema")
CreateScript = Action("CreateScript")
CreateSecurityConfiguration = Action("CreateSecurityConfiguration")
CreateTable = Action("CreateTable")
CreateTrigger = Action("CreateTrigger")
CreateUserDefinedFunction = Action("CreateUserDefinedFunction")
CreateWorkflow = Action("CreateWorkflow")
DeleteClassifier = Action("DeleteClassifier")
DeleteConnection = Action("DeleteConnection")
DeleteCrawler = Action("DeleteCrawler")
DeleteDatabase = Action("DeleteDatabase")
DeleteDevEndpoint = Action("DeleteDevEndpoint")
DeleteJob = Action("DeleteJob")
DeleteMLTransform = Action("DeleteMLTransform")
DeletePartition = Action("DeletePartition")
DeleteRegistry = Action("DeleteRegistry")
DeleteResourcePolicy = Action("DeleteResourcePolicy")
DeleteSchema = Action("DeleteSchema")
DeleteSchemaVersions = Action("DeleteSchemaVersions")
DeleteSecurityConfiguration = Action("DeleteSecurityConfiguration")
DeleteTable = Action("DeleteTable")
DeleteTableVersion = Action("DeleteTableVersion")
DeleteTrigger = Action("DeleteTrigger")
DeleteUserDefinedFunction = Action("DeleteUserDefinedFunction")
DeleteWorkflow = Action("DeleteWorkflow")
GetCatalogImportStatus = Action("GetCatalogImportStatus")
GetClassifier = Action("GetClassifier")
GetClassifiers = Action("GetClassifiers")
GetConnection = Action("GetConnection")
GetConnections = Action("GetConnections")
GetCrawler = Action("GetCrawler")
GetCrawlerMetrics = Action("GetCrawlerMetrics")
GetCrawlers = Action("GetCrawlers")
GetDataCatalogEncryptionSettings = Action("GetDataCatalogEncryptionSettings")
GetDatabase = Action("GetDatabase")
GetDatabases = Action("GetDatabases")
GetDataflowGraph = Action("GetDataflowGraph")
GetDevEndpoint = Action("GetDevEndpoint")
GetDevEndpoints = Action("GetDevEndpoints")
GetJob = Action("GetJob")
GetJobBookmark = Action("GetJobBookmark")
GetJobRun = Action("GetJobRun")
GetJobRuns = Action("GetJobRuns")
GetJobs = Action("GetJobs")
GetMLTaskRun = Action("GetMLTaskRun")
GetMLTaskRuns = Action("GetMLTaskRuns")
GetMLTransform = Action("GetMLTransform")
GetMLTransforms = Action("GetMLTransforms")
GetMapping = Action("GetMapping")
GetPartition = Action("GetPartition")
GetPartitions = Action("GetPartitions")
GetPlan = Action("GetPlan")
GetRegistry = Action("GetRegistry")
GetResourcePolicies = Action("GetResourcePolicies")
GetResourcePolicy = Action("GetResourcePolicy")
GetSchema = Action("GetSchema")
GetSchemaByDefinition = Action("GetSchemaByDefinition")
GetSchemaVersion = Action("GetSchemaVersion")
GetSchemaVersionsDiff = Action("GetSchemaVersionsDiff")
GetSecurityConfiguration = Action("GetSecurityConfiguration")
GetSecurityConfigurations = Action("GetSecurityConfigurations")
GetTable = Action("GetTable")
GetTableVersion = Action("GetTableVersion")
GetTableVersions = Action("GetTableVersions")
GetTables = Action("GetTables")
GetTags = Action("GetTags")
GetTrigger = Action("GetTrigger")
GetTriggers = Action("GetTriggers")
GetUserDefinedFunction = Action("GetUserDefinedFunction")
GetUserDefinedFunctions = Action("GetUserDefinedFunctions")
GetWorkflow = Action("GetWorkflow")
GetWorkflowRun = Action("GetWorkflowRun")
GetWorkflowRunProperties = Action("GetWorkflowRunProperties")
GetWorkflowRuns = Action("GetWorkflowRuns")
ImportCatalogToGlue = Action("ImportCatalogToGlue")
ListCrawlers = Action("ListCrawlers")
ListDevEndpoints = Action("ListDevEndpoints")
ListJobs = Action("ListJobs")
ListMLTransforms = Action("ListMLTransforms")
ListRegistries = Action("ListRegistries")
ListSchemaVersions = Action("ListSchemaVersions")
ListSchemas = Action("ListSchemas")
ListTriggers = Action("ListTriggers")
ListWorkflows = Action("ListWorkflows")
PutDataCatalogEncryptionSettings = Action("PutDataCatalogEncryptionSettings")
PutResourcePolicy = Action("PutResourcePolicy")
PutSchemaVersionMetadata = Action("PutSchemaVersionMetadata")
PutWorkflowRunProperties = Action("PutWorkflowRunProperties")
QuerySchemaVersionMetadata = Action("QuerySchemaVersionMetadata")
RegisterSchemaVersion = Action("RegisterSchemaVersion")
RemoveSchemaVersionMetadata = Action("RemoveSchemaVersionMetadata")
ResetJobBookmark = Action("ResetJobBookmark")
SearchTables = Action("SearchTables")
StartCrawler = Action("StartCrawler")
StartCrawlerSchedule = Action("StartCrawlerSchedule")
StartExportLabelsTaskRun = Action("StartExportLabelsTaskRun")
StartImportLabelsTaskRun = Action("StartImportLabelsTaskRun")
StartJobRun = Action("StartJobRun")
StartMLEvaluationTaskRun = Action("StartMLEvaluationTaskRun")
StartMLLabelingSetGenerationTaskRun = Action("StartMLLabelingSetGenerationTaskRun")
StartTrigger = Action("StartTrigger")
StartWorkflowRun = Action("StartWorkflowRun")
StopCrawler = Action("StopCrawler")
StopCrawlerSchedule = Action("StopCrawlerSchedule")
StopTrigger = Action("StopTrigger")
TagResource = Action("TagResource")
UntagResource = Action("UntagResource")
UpdateClassifier = Action("UpdateClassifier")
UpdateConnection = Action("UpdateConnection")
UpdateCrawler = Action("UpdateCrawler")
UpdateCrawlerSchedule = Action("UpdateCrawlerSchedule")
UpdateDatabase = Action("UpdateDatabase")
UpdateDevEndpoint = Action("UpdateDevEndpoint")
UpdateJob = Action("UpdateJob")
UpdateMLTransform = Action("UpdateMLTransform")
UpdatePartition = Action("UpdatePartition")
UpdateRegistry = Action("UpdateRegistry")
UpdateSchema = Action("UpdateSchema")
UpdateTable = Action("UpdateTable")
UpdateTrigger = Action("UpdateTrigger")
UpdateUserDefinedFunction = Action("UpdateUserDefinedFunction")
UpdateWorkflow = Action("UpdateWorkflow")
UseMLTransforms = Action("UseMLTransforms")
|
stefpetrov/Software-University | Programming Basics - Exams/task4.js | <reponame>stefpetrov/Software-University
function training(input){
let index = 0;
let days = Number(input[index]);
index++
let firstDayKm = Number(input[index]);
index++
let totalKm = firstDayKm
let finalKm = firstDayKm
for(let currentDay = 1; currentDay <= days; currentDay++){
let percent = Number(input[index]);
index++
let addKm = totalKm * percent / 100
totalKm += addKm
finalKm += totalKm
}
if(finalKm >= 1000){
console.log(`You've done a great job running ${Math.ceil(finalKm - 1000)} more kilometers!`)
}
else{
console.log(`Sorry <NAME>, you need to run ${Math.ceil(1000 - finalKm)} more kilometers`)
}
}
// Sample invocation with the exam's example input: 5 training days,
// 30 km on day one, then daily improvements of 10, 15, 20, 5 and 12 %.
training(["5",
"30",
"10",
"15",
"20",
"5",
"12"])
GobiShanthan/Portfolio | frontend/src/MarvelApp/Components/MarvelLoader.js | import React from 'react'
import DrStrange from '../marvelPics/DrStrange.png'
import spinningRing from '../marvelPics/spinRing.png'
import './marvelCss.css'
const MarvelLoader = () => {
return (
<div className='drStrangeSpinn'>
<img src={DrStrange} alt='DrStrange' style={{maxWidth:"600px",width:'33vw'}} />
<img src={spinningRing} alt='spinningRing' className='spinningRing' />
</div>
)
}
export default MarvelLoader
|
chuckwondo/iter-tools | src/impls/$enumerate/async-enumerate.js | <filename>src/impls/$enumerate/async-enumerate.js
/**
* @generated-from ./$enumerate.js
* This file is autogenerated from a template. Please do not edit it directly.
* To rebuild it from its template use the command
* > npm run generate
* More information can be found in CONTRIBUTING.md
*/
import { asyncIterableCurry } from '../../internal/async-iterable.js';
import { __asyncMap } from '../$map/async-map.js';
export function __asyncEnumerate(source, start = 0) {
  // Pair each item of `source` with its index, offset by `start`:
  // yields [start + i, value] tuples.
  return __asyncMap(source, (value, i) => [start + i, value]);
}
export const asyncEnumerate = /*#__PURE__*/ asyncIterableCurry(__asyncEnumerate, {
  // `start` is the only (optional) extra argument to the curried form.
  minArgs: 0,
  maxArgs: 1,
});
|
khandy21yo/aplus | CMC030/cmcfun/source/prnt_numberith.c | /* %TITLE "Figure out follower for a number (st,nd,...)"
*/
#pragma module prnt_numberith "V3.6 Calico"
/*
* COPYRIGHT (C) 1992 BY
* Computer Management Center, Inc.
* Idaho Falls, Idaho 83402
*
* This software is furnished under a license and may be used and
* copied only in accordance with terms of such license and with
* the inclusion of the above copyright notice. This software or
* any other copies thereof may not be provided or otherwise made
* available to any other person. No title to and ownership of
* the software is hereby transferred.
*
* The information in this software is subject to change without
* notice and should not be construed as a commitment by
* Computer Management Center, Inc.
*
* CMC assumes no responsibility for the use or reliability of
* its software on equipment which is not supported by CMC.
*
*++
*
* Abstract:HELP
* .p
* This function determines the code to stick on the
* end of a number to make it read right, (1st,2nd,
* 40897th, ...)
*
* Parameters:
*
* AMOUNT%
* A passed number (max 999,999,999.99, min 0)
*
* The return is a string of the format:
*
* st,nd,rd,th
*
* Example:
*
* PRNT_NUMBERITH(100)
*
* Compile:
*
* $ CC/G_FLOAT FUNC_SOURCE:PRNT_NUMBERITH
* $ LIB FUNC_LIB:CMC_3VECTOR/REP PRNT_NUMBERITH
* $ DELETE PRNT_NUMBERITH.OBJ;*
*
* Author:
*
* 04/09/92 - <NAME>
*
* Modification history:
*
* 07/08/93 - <NAME>
* Convert to C.
*
* 07/08/93 - <NAME>
 *	Fixed problem with 11-13.
*
* 04/17/95 - <NAME>
* (V3.6)
* Update to V3.6 coding standards.
*
* 10/20/95 - <NAME>
* Put into sharable library.
*
* 05/27/99 - <NAME>
* Modify so will compile in DEC-C without errors
* (module, descrip.h, str$routines.h)
*--
*/
#include <descrip.h>
#include <str$routines.h>
/*
 * Copy the ordinal suffix ("st", "nd", "rd" or "th") for *amount into
 * the string described by returnstr, using the VMS RTL STR$COPY_R
 * routine (destination descriptor, length by reference, source address).
 */
void prnt_numberith(struct dsc$descriptor *returnstr, long *amount)
{
	/*
	 * 11th..13th are irregular: although they end in 1, 2, 3 they
	 * all take "th" (11th, 112th, ...), so test two digits first.
	 */
	if ((((*amount) % 100) >= 11) && (((*amount) % 100) <= 13))
	{
		str$copy_r(returnstr, &2l, &"th");
	}
	else
	{
		/*
		 * Determine correct ending from the final decimal digit
		 */
		switch ((*amount) % 10)
		{
		case 1:
			/*
			 * Was "&2" here; changed to "&2l" so the length is
			 * passed with the same literal type as every other
			 * call in this routine.
			 */
			str$copy_r(returnstr, &2l, &"st");
			break;
		case 2:
			str$copy_r(returnstr, &2l, &"nd");
			break;
		case 3:
			str$copy_r(returnstr, &2l, &"rd");
			break;
		default:
			str$copy_r(returnstr, &2l, &"th");
			break;
		}
	}
}
|
tyekx/netcode | Netcode/Graphics/DX12/DX12GraphicsModuleForWin32.cpp | <reponame>tyekx/netcode
#include "DX12GraphicsModuleForWin32.h"
#include "DX12Common.h"
namespace Netcode::Graphics::DX12 {
void DX12GraphicsModuleForWin32::Start(Netcode::Module::AApp * app) {
if(app->window != nullptr) {
hwnd = reinterpret_cast<HWND>(app->window->GetUnderlyingPointer());
}
DX12GraphicsModule::Start(app);
}
	// Creates the DXGI swap chain for the cached window handle. The call
	// order is significant: create against the command queue, cast the
	// temporary IDXGISwapChain1 up to the stored interface, then query
	// back the dimensions DXGI actually resolved from the window.
	void DX12GraphicsModuleForWin32::CreateSwapChain()
	{
		// Allow display-mode switches (e.g. fullscreen transitions).
		swapChainFlags |= DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH;
		DXGI_SWAP_CHAIN_DESC1 swapChainDesc = { 0 };
		// if you specify width/height as 0, the CreateSwapChainForHwnd will query it from the output window
		swapChainDesc.Width = 0;
		swapChainDesc.Height = 0;
		swapChainDesc.Format = renderTargetFormat;
		swapChainDesc.Stereo = false;
		swapChainDesc.SampleDesc.Count = 1; // no multisampling on the backbuffer
		swapChainDesc.SampleDesc.Quality = 0;
		swapChainDesc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
		swapChainDesc.BufferCount = backbufferDepth;
		swapChainDesc.Scaling = DXGI_SCALING_NONE;
		swapChainDesc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_DISCARD;
		swapChainDesc.AlphaMode = DXGI_ALPHA_MODE_UNSPECIFIED;
		swapChainDesc.Flags = swapChainFlags;
		// CreateSwapChainForHwnd only returns an IDXGISwapChain1, so create
		// into a temporary and cast (QueryInterface) to the stored version.
		com_ptr<IDXGISwapChain1> tempSwapChain;
		DX_API("Failed to create swap chain for hwnd")
		dxgiFactory->CreateSwapChainForHwnd(commandQueue.Get(), hwnd, &swapChainDesc, nullptr, nullptr, &tempSwapChain);
		DX_API("Failed to cast swap chain")
		tempSwapChain.As(&swapChain);
		// DXGI_MWA_NO_ALT_ENTER disables DXGI's built-in Alt+Enter
		// fullscreen toggle for this window.
		DX_API("Failed to make window association")
		dxgiFactory->MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER);
		DX_API("Failed to set background color")
		swapChain->SetBackgroundColor(&clearColor);
		// Read back the size DXGI picked (we passed 0x0 above) so the
		// viewport matches the real backbuffer dimensions.
		DXGI_SWAP_CHAIN_DESC1 scDesc;
		DX_API("failed to get swap chain desc")
		swapChain->GetDesc1(&scDesc);
		width = scDesc.Width;
		height = scDesc.Height;
		UpdateViewport();
	}
}
|
MontyThibault/centre-of-mass-awareness | Cartwheel/lib/Python26/Lib/site-packages/OpenGL/raw/GL/NV/texture_shader.py | <reponame>MontyThibault/centre-of-mass-awareness<gh_stars>0
'''OpenGL extension NV.texture_shader
Overview (from the spec)
Standard OpenGL and the ARB_multitexture extension define a
straightforward direct mechanism for mapping sets of texture
coordinates to filtered colors. This extension provides a more
functional mechanism.
OpenGL's standard texturing mechanism defines a set of texture
targets. Each texture target defines how the texture image
is specified and accessed via a set of texture coordinates.
OpenGL 1.0 defines the 1D and 2D texture targets. OpenGL 1.2
(and/or the EXT_texture3D extension) defines the 3D texture target.
The ARB_texture_cube_map extension defines the cube map texture
target. Each texture unit's texture coordinate set is mapped to a
color using the unit's highest priority enabled texture target.
This extension introduces texture shader stages. A sequence of
texture shader stages provides a more flexible mechanism for mapping
sets of texture coordinates to texture unit RGBA results than standard
OpenGL.
When the texture shader enable is on, the extension replaces the
conventional OpenGL mechanism for mapping sets of texture coordinates
to filtered colors with this extension's sequence of texture shader
stages.
Each texture shader stage runs one of 21 canned texture shader
programs. These programs support conventional OpenGL texture
mapping but also support dependent texture accesses, dot product
texture programs, and special modes. (3D texture mapping
texture shader operations are NOT provided by this extension;
3D texture mapping texture shader operations are added by the
NV_texture_shader2 extension that is layered on this extension.
See the NV_texture_shader2 specification.)
To facilitate the new texture shader programs, this extension
introduces several new texture formats and variations on existing
formats. Existing color texture formats are extended by introducing
new signed variants. Two new types of texture formats (beyond colors)
are also introduced. Texture offset groups encode two signed offsets,
and optionally a magnitude or a magnitude and an intensity. The new
HILO (pronounced high-low) formats provide possibly signed, high
precision (16-bit) two-component textures.
Each program takes as input the stage's interpolated texture
coordinate set (s,t,r,q). Each program generates two results:
a shader stage result that may be used as an input to subsequent
shader stage programs, and a texture unit RGBA result that becomes the
texture color used by the texture unit's texture environment function
or becomes the initial value for the corresponding texture register
for register combiners. The texture unit RGBA result is always
an RGBA color, but the shader stage result may be one of an RGBA
color, a HILO value, a texture offset group, a floating-point value,
or an invalid result. When both results are RGBA colors, the shader
stage result and the texture unit RGBA result are usually identical
(though not in all cases).
Additionally, certain programs have a side-effect such as culling
the fragment or replacing the fragment's depth value.
The twenty-one programs are briefly described:
<none>
1. NONE - Always generates a (0,0,0,0) texture unit RGBA result.
Equivalent to disabling all texture targets in conventional
OpenGL.
<conventional textures>
2. TEXTURE_1D - Accesses a 1D texture via (s/q).
3. TEXTURE_2D - Accesses a 2D texture via (s/q,t/q).
4. TEXTURE_RECTANGLE_NV - Accesses a rectangular texture via (s/q,t/q).
5. TEXTURE_CUBE_MAP_ARB - Accesses a cube map texture via (s,t,r).
<special modes>
6. PASS_THROUGH_NV - Converts a texture coordinate (s,t,r,q)
directly to a [0,1] clamped (r,g,b,a) texture unit RGBA result.
7. CULL_FRAGMENT_NV - Culls the fragment based on the whether each
(s,t,r,q) is "greater than or equal to zero" or "less than zero".
<offset textures>
8. OFFSET_TEXTURE_2D_NV - Transforms the signed (ds,dt) components
of a previous texture unit by a 2x2 floating-point matrix and
then uses the result to offset the stage's texture coordinates
for a 2D non-projective texture.
9. OFFSET_TEXTURE_2D_SCALE_NV - Same as above except the magnitude
component of the previous texture unit result scales the red,
green, and blue components of the unsigned RGBA texture 2D
access.
10. OFFSET_TEXTURE_RECTANGLE_NV - Similar to OFFSET_TEXTURE_2D_NV
except that the texture access is into a rectangular
non-projective texture.
11. OFFSET_TEXTURE_RECTANGLE_SCALE_NV - Similar to
OFFSET_TEXTURE_2D_SCALE_NV except that the texture access is
into a rectangular non-projective texture.
<dependent textures>
12. DEPENDENT_AR_TEXTURE_2D_NV - Converts the alpha and red
components of a previous shader result into an (s,t) texture
coordinate set to access a 2D non-projective texture.
13. DEPENDENT_GB_TEXTURE_2D_NV - Converts the green and blue
components of a previous shader result into an (s,t) texture
coordinate set to access a 2D non-projective texture.
<dot product textures>
14. DOT_PRODUCT_NV - Computes the dot product of the texture
shader's texture coordinate set (s,t,r) with some mapping of the
components of a previous texture shader result. The component
mapping depends on the type (RGBA or HILO) and signedness of
the stage's previous texture input. Other dot product texture
programs use the result of this program to compose a texture
coordinate set for a dependent texture access. The color result
is undefined.
15. DOT_PRODUCT_TEXTURE_2D_NV - When preceded by a DOT_PRODUCT_NV
program in the previous texture shader stage, computes a second
similar dot product and composes the two dot products into (s,t)
texture coordinate set to access a 2D non-projective texture.
16. DOT_PRODUCT_TEXTURE_RECTANGLE_NV - Similar to
        DOT_PRODUCT_TEXTURE_2D_NV except that the texture access is into
a rectangular non-projective texture.
17. DOT_PRODUCT_TEXTURE_CUBE_MAP_NV - When preceded by two
DOT_PRODUCT_NV programs in the previous two texture shader
stages, computes a third similar dot product and composes the
three dot products into (s,t,r) texture coordinate set to access
a cube map texture.
18. DOT_PRODUCT_REFLECT_CUBE_MAP_NV - When preceded by two
DOT_PRODUCT_NV programs in the previous two texture shader
stages, computes a third similar dot product and composes the
three dot products into a normal vector (Nx,Ny,Nz). An eye
vector (Ex,Ey,Ez) is composed from the q texture coordinates of
the three stages. A reflection vector (Rx,Ry,Rz) is computed
based on the normal and eye vectors. The reflection vector
forms an (s,t,r) texture coordinate set to access a cube map
texture.
19. DOT_PRODUCT_CONST_EYE_REFLECT_CUBE_MAP_NV - Operates like
DOT_PRODUCT_REFLECT_CUBE_MAP_NV except that the eye vector
(Ex,Ey,Ez) is a user-defined constant rather than composed from
the q coordinates of the three stages.
20. DOT_PRODUCT_DIFFUSE_CUBE_MAP_NV - When used instead of the second
DOT_PRODUCT_NV program preceding
a DOT_PRODUCT_REFLECT_CUBE_MAP_NV or
DOT_PRODUCT_CONST_EYE_REFLECT_CUBE_MAP_NV stage, the normal
vector forms an (s,t,r) texture coordinate set to access a
cube map texture.
<dot product depth replace>
21. DOT_PRODUCT_DEPTH_REPLACE_NV - When preceded by a DOT_PRODUCT_NV
program in the previous texture shader stage, computes a second
similar dot product and replaces the fragment's window-space
depth value with the first dot product results divided by
the second. The texture unit RGBA result is (0,0,0,0).
The official definition of this extension is available here:
http://oss.sgi.com/projects/ogl-sample/registry/NV/texture_shader.txt
Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes
EXTENSION_NAME = 'GL_NV_texture_shader'
GL_OFFSET_TEXTURE_RECTANGLE_NV = constant.Constant( 'GL_OFFSET_TEXTURE_RECTANGLE_NV', 0x864C )
GL_OFFSET_TEXTURE_RECTANGLE_SCALE_NV = constant.Constant( 'GL_OFFSET_TEXTURE_RECTANGLE_SCALE_NV', 0x864D )
GL_DOT_PRODUCT_TEXTURE_RECTANGLE_NV = constant.Constant( 'GL_DOT_PRODUCT_TEXTURE_RECTANGLE_NV', 0x864E )
GL_RGBA_UNSIGNED_DOT_PRODUCT_MAPPING_NV = constant.Constant( 'GL_RGBA_UNSIGNED_DOT_PRODUCT_MAPPING_NV', 0x86D9 )
GL_UNSIGNED_INT_S8_S8_8_8_NV = constant.Constant( 'GL_UNSIGNED_INT_S8_S8_8_8_NV', 0x86DA )
GL_UNSIGNED_INT_8_8_S8_S8_REV_NV = constant.Constant( 'GL_UNSIGNED_INT_8_8_S8_S8_REV_NV', 0x86DB )
GL_DSDT_MAG_INTENSITY_NV = constant.Constant( 'GL_DSDT_MAG_INTENSITY_NV', 0x86DC )
GL_SHADER_CONSISTENT_NV = constant.Constant( 'GL_SHADER_CONSISTENT_NV', 0x86DD )
GL_TEXTURE_SHADER_NV = constant.Constant( 'GL_TEXTURE_SHADER_NV', 0x86DE )
glget.addGLGetConstant( GL_TEXTURE_SHADER_NV, (1,) )
GL_SHADER_OPERATION_NV = constant.Constant( 'GL_SHADER_OPERATION_NV', 0x86DF )
GL_CULL_MODES_NV = constant.Constant( 'GL_CULL_MODES_NV', 0x86E0 )
GL_OFFSET_TEXTURE_MATRIX_NV = constant.Constant( 'GL_OFFSET_TEXTURE_MATRIX_NV', 0x86E1 )
GL_OFFSET_TEXTURE_SCALE_NV = constant.Constant( 'GL_OFFSET_TEXTURE_SCALE_NV', 0x86E2 )
GL_OFFSET_TEXTURE_BIAS_NV = constant.Constant( 'GL_OFFSET_TEXTURE_BIAS_NV', 0x86E3 )
GL_PREVIOUS_TEXTURE_INPUT_NV = constant.Constant( 'GL_PREVIOUS_TEXTURE_INPUT_NV', 0x86E4 )
GL_CONST_EYE_NV = constant.Constant( 'GL_CONST_EYE_NV', 0x86E5 )
GL_PASS_THROUGH_NV = constant.Constant( 'GL_PASS_THROUGH_NV', 0x86E6 )
GL_CULL_FRAGMENT_NV = constant.Constant( 'GL_CULL_FRAGMENT_NV', 0x86E7 )
GL_OFFSET_TEXTURE_2D_NV = constant.Constant( 'GL_OFFSET_TEXTURE_2D_NV', 0x86E8 )
GL_DEPENDENT_AR_TEXTURE_2D_NV = constant.Constant( 'GL_DEPENDENT_AR_TEXTURE_2D_NV', 0x86E9 )
GL_DEPENDENT_GB_TEXTURE_2D_NV = constant.Constant( 'GL_DEPENDENT_GB_TEXTURE_2D_NV', 0x86EA )
GL_DOT_PRODUCT_NV = constant.Constant( 'GL_DOT_PRODUCT_NV', 0x86EC )
GL_DOT_PRODUCT_DEPTH_REPLACE_NV = constant.Constant( 'GL_DOT_PRODUCT_DEPTH_REPLACE_NV', 0x86ED )
GL_DOT_PRODUCT_TEXTURE_2D_NV = constant.Constant( 'GL_DOT_PRODUCT_TEXTURE_2D_NV', 0x86EE )
GL_DOT_PRODUCT_TEXTURE_CUBE_MAP_NV = constant.Constant( 'GL_DOT_PRODUCT_TEXTURE_CUBE_MAP_NV', 0x86F0 )
GL_DOT_PRODUCT_DIFFUSE_CUBE_MAP_NV = constant.Constant( 'GL_DOT_PRODUCT_DIFFUSE_CUBE_MAP_NV', 0x86F1 )
GL_DOT_PRODUCT_REFLECT_CUBE_MAP_NV = constant.Constant( 'GL_DOT_PRODUCT_REFLECT_CUBE_MAP_NV', 0x86F2 )
GL_DOT_PRODUCT_CONST_EYE_REFLECT_CUBE_MAP_NV = constant.Constant( 'GL_DOT_PRODUCT_CONST_EYE_REFLECT_CUBE_MAP_NV', 0x86F3 )
GL_HILO_NV = constant.Constant( 'GL_HILO_NV', 0x86F4 )
GL_DSDT_NV = constant.Constant( 'GL_DSDT_NV', 0x86F5 )
GL_DSDT_MAG_NV = constant.Constant( 'GL_DSDT_MAG_NV', 0x86F6 )
GL_DSDT_MAG_VIB_NV = constant.Constant( 'GL_DSDT_MAG_VIB_NV', 0x86F7 )
GL_HILO16_NV = constant.Constant( 'GL_HILO16_NV', 0x86F8 )
GL_SIGNED_HILO_NV = constant.Constant( 'GL_SIGNED_HILO_NV', 0x86F9 )
GL_SIGNED_HILO16_NV = constant.Constant( 'GL_SIGNED_HILO16_NV', 0x86FA )
GL_SIGNED_RGBA_NV = constant.Constant( 'GL_SIGNED_RGBA_NV', 0x86FB )
GL_SIGNED_RGBA8_NV = constant.Constant( 'GL_SIGNED_RGBA8_NV', 0x86FC )
GL_SIGNED_RGB_NV = constant.Constant( 'GL_SIGNED_RGB_NV', 0x86FE )
GL_SIGNED_RGB8_NV = constant.Constant( 'GL_SIGNED_RGB8_NV', 0x86FF )
GL_SIGNED_LUMINANCE_NV = constant.Constant( 'GL_SIGNED_LUMINANCE_NV', 0x8701 )
GL_SIGNED_LUMINANCE8_NV = constant.Constant( 'GL_SIGNED_LUMINANCE8_NV', 0x8702 )
GL_SIGNED_LUMINANCE_ALPHA_NV = constant.Constant( 'GL_SIGNED_LUMINANCE_ALPHA_NV', 0x8703 )
GL_SIGNED_LUMINANCE8_ALPHA8_NV = constant.Constant( 'GL_SIGNED_LUMINANCE8_ALPHA8_NV', 0x8704 )
GL_SIGNED_ALPHA_NV = constant.Constant( 'GL_SIGNED_ALPHA_NV', 0x8705 )
GL_SIGNED_ALPHA8_NV = constant.Constant( 'GL_SIGNED_ALPHA8_NV', 0x8706 )
GL_SIGNED_INTENSITY_NV = constant.Constant( 'GL_SIGNED_INTENSITY_NV', 0x8707 )
GL_SIGNED_INTENSITY8_NV = constant.Constant( 'GL_SIGNED_INTENSITY8_NV', 0x8708 )
GL_DSDT8_NV = constant.Constant( 'GL_DSDT8_NV', 0x8709 )
GL_DSDT8_MAG8_NV = constant.Constant( 'GL_DSDT8_MAG8_NV', 0x870A )
GL_DSDT8_MAG8_INTENSITY8_NV = constant.Constant( 'GL_DSDT8_MAG8_INTENSITY8_NV', 0x870B )
GL_SIGNED_RGB_UNSIGNED_ALPHA_NV = constant.Constant( 'GL_SIGNED_RGB_UNSIGNED_ALPHA_NV', 0x870C )
GL_SIGNED_RGB8_UNSIGNED_ALPHA8_NV = constant.Constant( 'GL_SIGNED_RGB8_UNSIGNED_ALPHA8_NV', 0x870D )
GL_HI_SCALE_NV = constant.Constant( 'GL_HI_SCALE_NV', 0x870E )
glget.addGLGetConstant( GL_HI_SCALE_NV, (1,) )
GL_LO_SCALE_NV = constant.Constant( 'GL_LO_SCALE_NV', 0x870F )
glget.addGLGetConstant( GL_LO_SCALE_NV, (1,) )
GL_DS_SCALE_NV = constant.Constant( 'GL_DS_SCALE_NV', 0x8710 )
glget.addGLGetConstant( GL_DS_SCALE_NV, (1,) )
GL_DT_SCALE_NV = constant.Constant( 'GL_DT_SCALE_NV', 0x8711 )
glget.addGLGetConstant( GL_DT_SCALE_NV, (1,) )
GL_MAGNITUDE_SCALE_NV = constant.Constant( 'GL_MAGNITUDE_SCALE_NV', 0x8712 )
glget.addGLGetConstant( GL_MAGNITUDE_SCALE_NV, (1,) )
GL_VIBRANCE_SCALE_NV = constant.Constant( 'GL_VIBRANCE_SCALE_NV', 0x8713 )
glget.addGLGetConstant( GL_VIBRANCE_SCALE_NV, (1,) )
GL_HI_BIAS_NV = constant.Constant( 'GL_HI_BIAS_NV', 0x8714 )
glget.addGLGetConstant( GL_HI_BIAS_NV, (1,) )
GL_LO_BIAS_NV = constant.Constant( 'GL_LO_BIAS_NV', 0x8715 )
glget.addGLGetConstant( GL_LO_BIAS_NV, (1,) )
GL_DS_BIAS_NV = constant.Constant( 'GL_DS_BIAS_NV', 0x8716 )
glget.addGLGetConstant( GL_DS_BIAS_NV, (1,) )
GL_DT_BIAS_NV = constant.Constant( 'GL_DT_BIAS_NV', 0x8717 )
glget.addGLGetConstant( GL_DT_BIAS_NV, (1,) )
GL_MAGNITUDE_BIAS_NV = constant.Constant( 'GL_MAGNITUDE_BIAS_NV', 0x8718 )
glget.addGLGetConstant( GL_MAGNITUDE_BIAS_NV, (1,) )
GL_VIBRANCE_BIAS_NV = constant.Constant( 'GL_VIBRANCE_BIAS_NV', 0x8719 )
glget.addGLGetConstant( GL_VIBRANCE_BIAS_NV, (1,) )
GL_TEXTURE_BORDER_VALUES_NV = constant.Constant( 'GL_TEXTURE_BORDER_VALUES_NV', 0x871A )
GL_TEXTURE_HI_SIZE_NV = constant.Constant( 'GL_TEXTURE_HI_SIZE_NV', 0x871B )
GL_TEXTURE_LO_SIZE_NV = constant.Constant( 'GL_TEXTURE_LO_SIZE_NV', 0x871C )
GL_TEXTURE_DS_SIZE_NV = constant.Constant( 'GL_TEXTURE_DS_SIZE_NV', 0x871D )
GL_TEXTURE_DT_SIZE_NV = constant.Constant( 'GL_TEXTURE_DT_SIZE_NV', 0x871E )
GL_TEXTURE_MAG_SIZE_NV = constant.Constant( 'GL_TEXTURE_MAG_SIZE_NV', 0x871F )
def glInitTextureShaderNV():
    '''Return boolean indicating whether this extension is available'''
    # EXTENSION_NAME and the extensions helper are module-level names defined
    # earlier in this generated extension module.
    return extensions.hasGLExtension( EXTENSION_NAME )
|
dannegm/neodrive | packages/neo-client/src/common/icons/outline/FacebookOutline.js | <gh_stars>1-10
import React from 'react';
import SvgIcon from '../../components/SvgIcon';
const FacebookOutline = (props) => (
<SvgIcon {...props}>
<g>
<path d="M13 22H9a1 1 0 01-1-1v-6.2H6a1 1 0 01-1-1v-3.6a1 1 0 011-1h2V7.5A5.77 5.77 0 0114 2h3a1 1 0 011 1v3.6a1 1 0 01-1 1h-3v1.6h3a1 1 0 01.8.39 1 1 0 01.16.88l-1 3.6a1 1 0 01-1 .73H14V21a1 1 0 01-1 1zm-3-2h2v-6.2a1 1 0 011-1h2.24l.44-1.6H13a1 1 0 01-1-1V7.5a2 2 0 012-1.9h2V4h-2a3.78 3.78 0 00-4 3.5v2.7a1 1 0 01-1 1H7v1.6h2a1 1 0 011 1z" />
</g>
</SvgIcon>
);
export default FacebookOutline;
|
ChristopheCVB/EliteDangerousAPI | src/main/java/com/christophecvb/elitedangerous/events/exploration/SAASignalsFoundEvent.java | <filename>src/main/java/com/christophecvb/elitedangerous/events/exploration/SAASignalsFoundEvent.java
package com.christophecvb.elitedangerous.events.exploration;
import com.christophecvb.elitedangerous.events.Event;
import com.christophecvb.elitedangerous.models.SAASignal;
import java.util.List;
/**
 * Journal event raised when a Surface Area Analysis (SAA) scan reports the
 * signals found on a body.
 */
public class SAASignalsFoundEvent extends Event {
    /** 64-bit unique address of the star system. */
    public Long systemAddress;
    /** Name of the scanned body. */
    public String bodyName;
    /** In-system identifier of the scanned body. */
    public Integer bodyID;
    /** Signals discovered on the body. */
    public List<SAASignal> signals;

    /** Listener contract for {@link SAASignalsFoundEvent}. */
    public interface Listener extends Event.Listener {
        @Override
        default <T extends Event> void onTriggered(T event) {
            this.onSAAScanCompleteEventTriggered((SAASignalsFoundEvent) event);
        }

        // NOTE(review): the method name says "SAAScanComplete" although this event is
        // SAASignalsFound; renaming the method would break existing implementers, so
        // only the copy-pasted parameter name ("navRouteEvent") is corrected here.
        void onSAAScanCompleteEventTriggered(SAASignalsFoundEvent saaSignalsFoundEvent);
    }
}
|
BantorSchwanzVor/plotscanner-leak | org/apache/commons/net/examples/cidr/SubnetUtilsExample.java | package org.apache.commons.net.examples.cidr;
import java.util.Arrays;
import java.util.Scanner;
import org.apache.commons.net.util.SubnetUtils;
/**
 * Example that prints CIDR information for a hard-coded subnet and then
 * interactively checks whether user-supplied IP addresses fall inside it.
 */
public class SubnetUtilsExample {

    public static void main(String[] args) {
        String subnet = "192.168.0.3/31";
        SubnetUtils utils = new SubnetUtils(subnet);
        SubnetUtils.SubnetInfo info = utils.getInfo();

        /* printf is varargs: the explicit new Object[]{...} wrappers were decompiler noise. */
        System.out.printf("Subnet Information for %s:%n", subnet);
        System.out.println("--------------------------------------");
        System.out.printf("IP Address:\t\t\t%s\t[%s]%n", info.getAddress(),
                Integer.toBinaryString(info.asInteger(info.getAddress())));
        System.out.printf("Netmask:\t\t\t%s\t[%s]%n", info.getNetmask(),
                Integer.toBinaryString(info.asInteger(info.getNetmask())));
        System.out.printf("CIDR Representation:\t\t%s%n%n", info.getCidrSignature());
        System.out.printf("Supplied IP Address:\t\t%s%n%n", info.getAddress());
        System.out.printf("Network Address:\t\t%s\t[%s]%n", info.getNetworkAddress(),
                Integer.toBinaryString(info.asInteger(info.getNetworkAddress())));
        System.out.printf("Broadcast Address:\t\t%s\t[%s]%n", info.getBroadcastAddress(),
                Integer.toBinaryString(info.asInteger(info.getBroadcastAddress())));
        System.out.printf("Low Address:\t\t\t%s\t[%s]%n", info.getLowAddress(),
                Integer.toBinaryString(info.asInteger(info.getLowAddress())));
        System.out.printf("High Address:\t\t\t%s\t[%s]%n", info.getHighAddress(),
                Integer.toBinaryString(info.asInteger(info.getHighAddress())));
        System.out.printf("Total usable addresses: \t%d%n", info.getAddressCountLong());
        System.out.printf("Address List: %s%n%n", Arrays.toString(info.getAllAddresses()));

        /* The prompt variable was previously declared but never used; reuse it
         * instead of repeating the literal three times. */
        String prompt = "Enter an IP address (e.g. 192.168.0.10):";
        System.out.println(prompt);
        Scanner scanner = new Scanner(System.in);
        while (scanner.hasNextLine()) {
            String address = scanner.nextLine();
            System.out.println("The IP address [" + address + "] is "
                    + (info.isInRange(address) ? "" : "not ")
                    + "within the subnet [" + subnet + "]");
            System.out.println(prompt);
        }
        scanner.close();
    }
}
/* Location: C:\Users\BSV\AppData\Local\Temp\Rar$DRa6216.20396\Preview\Preview.jar!\org\apache\commons\net\examples\cidr\SubnetUtilsExample.class
* Java compiler version: 8 (52.0)
* JD-Core Version: 1.1.3
*/ |
CoderHam/WebScraper | node_modules/webdriverio/test/spec/functional/waitUntil.js | var q = require('q');
/**
 * Integration tests for client.waitUntil(condition, timeout, interval).
 * Each case resolves or rejects a q deferred on a timer and checks how
 * waitUntil reacts to the relative timing.
 * Skipped: the tests are timing-sensitive and break on travis.
 */
describe.skip('waitUntil', function() {

    before(h.setup());

    it('should pass', function() {
        var defer = q.defer();

        // condition turns truthy after 500ms, well inside the 1000ms timeout
        setTimeout(function() {
            defer.resolve('foobar');
        }, 500);

        return this.client.waitUntil(function() {
            return defer.promise;
        }, 1000).then(function(res) {
            res.should.be.equal('foobar');
        });
    });

    it('should fail', function() {
        var defer = q.defer();

        // condition resolves, but to a falsy value, so waitUntil still times out
        setTimeout(function() {
            defer.resolve(false);
        }, 500);

        return this.client.waitUntil(function() {
            return defer.promise;
        }, 1000).catch(function(err) {
            err.message.should.match(/Promise never resolved with an truthy value/);
        });
    });

    it('should get rejected', function() {
        var defer = q.defer();

        // a rejected condition promise surfaces as a distinct error message
        setTimeout(function() {
            defer.reject('foobar');
        }, 500);

        return this.client.waitUntil(function() {
            return defer.promise;
        }, 1000).catch(function(err) {
            err.message.should.match(/Promise was fulfilled but got rejected/);
        });
    });

    it('should timeout', function() {
        var defer = q.defer();

        // condition only becomes truthy after the 500ms timeout has expired
        setTimeout(function() {
            defer.resolve('foobar');
        }, 1000);

        return this.client.waitUntil(function() {
            return defer.promise;
        }, 500).catch(function(err) {
            err.message.should.match(/Promise never resolved with an truthy value/);
        });
    });

    it('should pass fast with a short waitfor interval', function() {
        var defer = q.defer();

        setTimeout(function() {
            defer.resolve('foobar');
        }, 50);

        // a 20ms polling interval re-checks quickly enough within the 100ms timeout
        return this.client.waitUntil(function() {
            return defer.promise;
        }, 100, 20).then(function(res) {
            res.should.be.equal('foobar');
        });
    });

    it('should timeout with a long waitfor interval', function() {
        var defer = q.defer();

        setTimeout(function() {
            defer.resolve('foobar');
        }, 50);

        // a 250ms interval never gets to re-poll before the 100ms timeout fires
        return this.client.waitUntil(function() {
            return defer.promise;
        }, 100, 250).catch(function(err) {
            err.message.should.match(/Promise never resolved with an truthy value/);
        });
    });

    it('should allow a promise condition', function() {
        var defer = q.defer();

        setTimeout(function() {
            defer.resolve('foobar');
        }, 500);

        // waitUntil also accepts a bare promise instead of a condition function
        return this.client.waitUntil(defer.promise, 1000).then(function(res) {
            res.should.be.equal('foobar');
        });
    });

    it('should check a condition multiple times', function() {
        var defer = q.defer(),
            flag = false,
            conditionCalledCount = 0,
            testCondition = function () {
                conditionCalledCount += 1;
                return q(flag);
            };

        setTimeout(function() {
            flag = 'foobar';
        }, 50);

        // with a 20ms interval the condition is expected to be polled 4 times
        // before the flag flips at ~50ms within the 100ms timeout
        return this.client.waitUntil(testCondition, 100, 20).then(function(res) {
            res.should.equal('foobar');
            conditionCalledCount.should.equal(4);
        });
    });

});
|
alokmenghrajani/adventofcode2021 | day01/day01_test.go | package day01
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestPart1 verifies the count of depth increases over the puzzle's sample input.
func TestPart1(t *testing.T) {
	input := `199
200
208
210
200
207
240
269
260
263`
	got := Part1(input)
	assert.Equal(t, 7, got)
}
// TestPart2 verifies the three-measurement sliding-window count over the sample input.
func TestPart2(t *testing.T) {
	input := `199
200
208
210
200
207
240
269
260
263`
	got := Part2(input)
	assert.Equal(t, 5, got)
}
|
bartboy011/eventsourcing | eventsourcing/tests/test_process_with_django.py | <gh_stars>1-10
import django
from django.db import models
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from eventsourcing.application.django import DjangoApplication
from eventsourcing.tests.sequenced_item_tests.test_django_record_manager import (
DjangoTestCase,
)
from eventsourcing.tests.test_process import TestProcessApplication
class TestProcessWithDjango(DjangoTestCase, TestProcessApplication):
    """Run the generic process-application test suite on Django infrastructure."""

    infrastructure_class = DjangoApplication

    def test_projection_into_custom_orm_obj(self):
        # Explicit re-declaration of the inherited test.  Python 3 zero-argument
        # super() replaces the legacy two-argument form used previously.
        super().test_projection_into_custom_orm_obj()

    def define_projection_record_class(self):
        """Declare the unmanaged Django model used as the projection target."""

        class ProjectionRecord(models.Model):
            uid = models.BigAutoField(primary_key=True)

            # Sequence ID (e.g. an entity or aggregate ID).
            projection_id = models.UUIDField()

            # State of the item (serialized dict, possibly encrypted).
            state = models.TextField()

            class Meta:
                db_table = "projections"
                app_label = "projections"
                managed = False

        self.projection_record_class = ProjectionRecord

    def setup_projections_table(self, process):
        """Drop the projections table if it exists, then recreate it."""
        from django.db import connections

        with connections["default"].schema_editor() as schema_editor:
            assert isinstance(schema_editor, BaseDatabaseSchemaEditor)
            try:
                schema_editor.delete_model(self.projection_record_class)
            except django.db.utils.ProgrammingError:
                # Table did not exist yet: nothing to drop.
                pass

        with connections["default"].schema_editor() as schema_editor:
            assert isinstance(schema_editor, BaseDatabaseSchemaEditor)
            schema_editor.create_model(self.projection_record_class)

    def get_projection_record(
        self, projection_record_class, projection_id, record_manager
    ):
        """Return the record for ``projection_id``, or None if it does not exist."""
        try:
            return projection_record_class.objects.get(projection_id=projection_id)
        except projection_record_class.DoesNotExist:
            return None


# Remove the imported base suite from this module's namespace so the test
# runner does not collect and execute the abstract tests a second time.
del TestProcessApplication
|
RitaGafni/artnet-cards-ui | src/components/BasicSearch.js | import React from 'react';
import { TextField } from '@mui/material';
export default function BasicSearch({ searchQ, setSearchQ, category }) {
return (
<div className='basic-search'>
<TextField
fullWidth
size='small'
id='basic search'
label={`Search ${category}`}
variant='outlined'
margin='normal'
value={searchQ}
onChange={(e) => setSearchQ(e.target.value)}
/>
</div>
);
}
|
m-m-m/marshal | impl/tvm-xml/src/main/java/module-info.java | <gh_stars>0
/*
* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
/**
* Provides an implementation of {@code mmm-marshall} for XML based on StAX.
*
* @provides io.github.mmm.marshall.StructuredFormatProvider
*/
module io.github.mmm.marshall.tvm.xml {

  // Core marshalling API that this module implements for XML.
  requires transitive io.github.mmm.marshall;

  // TeaVM JavaScript-interop APIs used to reach the browser's XML facilities.
  requires transitive org.teavm.jso;

  requires transitive org.teavm.jso.apis;

  exports io.github.mmm.marshall.tvm.xml;

  // Registers the TeaVM/XML implementation for ServiceLoader discovery.
  provides io.github.mmm.marshall.StructuredFormatProvider with //
      io.github.mmm.marshall.tvm.xml.TvmXmlFormatProvider;
}
|
hugovk/SeleniumLibrary | src/SeleniumLibrary/utils/events/__init__.py | <gh_stars>100-1000
# Copyright 2008-2011 Nokia Networks
# Copyright 2011-2016 <NAME>, <NAME> and contributors
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .scope_event import ScopeStart, ScopeEnd
__all__ = ["on", "dispatch", "register_event"]

# Event classes that on() may instantiate; the scope events are built in.
_registered_events = [ScopeStart, ScopeEnd]
# Instantiated (armed) events waiting to be fired by dispatch().
_events = []
def on(event_name, *args, **kwargs):
    """Instantiate the first registered event matching ``event_name`` and queue it.

    Unknown names are silently ignored; only the first match is armed.
    """
    matches = (cls for cls in _registered_events if cls.name == event_name)
    event_class = next(matches, None)
    if event_class is not None:
        _events.append(event_class(*args, **kwargs))
def dispatch(event_name, *args, **kwargs):
    """Trigger every queued event whose name matches ``event_name``."""
    for queued_event in _events:
        if queued_event.name == event_name:
            queued_event.trigger(*args, **kwargs)
def register_event(event):
    """Add ``event`` to the registry, rejecting duplicate names.

    Raises AttributeError when an event with the same name is already registered.
    """
    if any(registered.name == event.name for registered in _registered_events):
        raise AttributeError(
            "An event with the name " + event.name + " already exists."
        )
    _registered_events.append(event)
|
EsupPortail/esup-ecandidat | src/main/java/fr/univlorraine/ecandidat/MainUI.java | /**
* ESUP-Portail eCandidat - Copyright (c) 2016 ESUP-Portail consortium
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.univlorraine.ecandidat;
import java.io.EOFException;
import java.net.SocketTimeoutException;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.apache.chemistry.opencmis.commons.exceptions.CmisRuntimeException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.bridge.SLF4JBridgeHandler;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.authentication.AuthenticationCredentialsNotFoundException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.userdetails.UserDetails;
import com.vaadin.annotations.Push;
import com.vaadin.annotations.Theme;
import com.vaadin.navigator.View;
import com.vaadin.navigator.ViewChangeListener;
import com.vaadin.navigator.ViewProvider;
import com.vaadin.server.FontAwesome;
import com.vaadin.server.Page;
import com.vaadin.server.Responsive;
import com.vaadin.server.ThemeResource;
import com.vaadin.server.UploadException;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.VaadinSession;
import com.vaadin.shared.communication.PushMode;
import com.vaadin.shared.ui.ui.Transport;
import com.vaadin.spring.annotation.SpringUI;
import com.vaadin.spring.navigator.SpringViewProvider;
import com.vaadin.ui.CssLayout;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.Notification;
import com.vaadin.ui.UI;
import com.vaadin.ui.UIDetachedException;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.themes.ValoTheme;
import fr.univlorraine.ecandidat.controllers.AlertSvaController;
import fr.univlorraine.ecandidat.controllers.CandidatController;
import fr.univlorraine.ecandidat.controllers.I18nController;
import fr.univlorraine.ecandidat.controllers.LoadBalancingController;
import fr.univlorraine.ecandidat.controllers.LockCandidatController;
import fr.univlorraine.ecandidat.controllers.ParametreController;
import fr.univlorraine.ecandidat.controllers.TableRefController;
import fr.univlorraine.ecandidat.controllers.TagController;
import fr.univlorraine.ecandidat.controllers.UiController;
import fr.univlorraine.ecandidat.controllers.UserController;
import fr.univlorraine.ecandidat.entities.ecandidat.CentreCandidature;
import fr.univlorraine.ecandidat.entities.ecandidat.Commission;
import fr.univlorraine.ecandidat.entities.ecandidat.DroitProfilFonc;
import fr.univlorraine.ecandidat.services.security.SecurityCentreCandidature;
import fr.univlorraine.ecandidat.services.security.SecurityCommission;
import fr.univlorraine.ecandidat.utils.ConstanteUtils;
import fr.univlorraine.ecandidat.utils.MethodUtils;
import fr.univlorraine.ecandidat.utils.NomenclatureUtils;
import fr.univlorraine.ecandidat.utils.UIException;
import fr.univlorraine.ecandidat.vaadin.components.OneClickButton;
import fr.univlorraine.ecandidat.vaadin.components.ReloadViewNavigator;
import fr.univlorraine.ecandidat.vaadin.menu.AccordionItemMenu;
import fr.univlorraine.ecandidat.vaadin.menu.AccordionMenu;
import fr.univlorraine.ecandidat.vaadin.menu.Menu;
import fr.univlorraine.ecandidat.vaadin.menu.SubMenu;
import fr.univlorraine.ecandidat.vaadin.menu.SubMenuBar;
import fr.univlorraine.ecandidat.views.AccueilView;
import fr.univlorraine.ecandidat.views.AdminBatchView;
import fr.univlorraine.ecandidat.views.AdminCacheView;
import fr.univlorraine.ecandidat.views.AdminDroitProfilIndView;
import fr.univlorraine.ecandidat.views.AdminLangueView;
import fr.univlorraine.ecandidat.views.AdminLockCandidatView;
import fr.univlorraine.ecandidat.views.AdminOpiView;
import fr.univlorraine.ecandidat.views.AdminParametreView;
import fr.univlorraine.ecandidat.views.AdminVersionView;
import fr.univlorraine.ecandidat.views.AdminView;
import fr.univlorraine.ecandidat.views.AssistanceView;
import fr.univlorraine.ecandidat.views.CandidatAdminView;
import fr.univlorraine.ecandidat.views.CandidatAdresseView;
import fr.univlorraine.ecandidat.views.CandidatBacView;
import fr.univlorraine.ecandidat.views.CandidatCandidaturesView;
import fr.univlorraine.ecandidat.views.CandidatCompteMinimaView;
import fr.univlorraine.ecandidat.views.CandidatCreerCompteView;
import fr.univlorraine.ecandidat.views.CandidatCursusExterneView;
import fr.univlorraine.ecandidat.views.CandidatCursusInterneView;
import fr.univlorraine.ecandidat.views.CandidatFormationProView;
import fr.univlorraine.ecandidat.views.CandidatInfoPersoView;
import fr.univlorraine.ecandidat.views.CandidatStageView;
import fr.univlorraine.ecandidat.views.CommissionCandidatureView;
import fr.univlorraine.ecandidat.views.CommissionParametreView;
import fr.univlorraine.ecandidat.views.CtrCandCandidatureArchivedView;
import fr.univlorraine.ecandidat.views.CtrCandCandidatureCanceledView;
import fr.univlorraine.ecandidat.views.CtrCandCandidatureView;
import fr.univlorraine.ecandidat.views.CtrCandCommissionView;
import fr.univlorraine.ecandidat.views.CtrCandFormationView;
import fr.univlorraine.ecandidat.views.CtrCandFormulaireCommunView;
import fr.univlorraine.ecandidat.views.CtrCandFormulaireView;
import fr.univlorraine.ecandidat.views.CtrCandMailTypeDecView;
import fr.univlorraine.ecandidat.views.CtrCandMotivAvisView;
import fr.univlorraine.ecandidat.views.CtrCandParametreView;
import fr.univlorraine.ecandidat.views.CtrCandPieceJustifCommunView;
import fr.univlorraine.ecandidat.views.CtrCandPieceJustifView;
import fr.univlorraine.ecandidat.views.CtrCandStatCommView;
import fr.univlorraine.ecandidat.views.CtrCandStatFormView;
import fr.univlorraine.ecandidat.views.CtrCandTagView;
import fr.univlorraine.ecandidat.views.CtrCandTypeDecisionView;
import fr.univlorraine.ecandidat.views.ErreurView;
import fr.univlorraine.ecandidat.views.MaintenanceView;
import fr.univlorraine.ecandidat.views.OffreFormationView;
import fr.univlorraine.ecandidat.views.ScolAlertSvaView;
import fr.univlorraine.ecandidat.views.ScolCampagneView;
import fr.univlorraine.ecandidat.views.ScolCentreCandidatureView;
import fr.univlorraine.ecandidat.views.ScolDroitProfilView;
import fr.univlorraine.ecandidat.views.ScolFaqView;
import fr.univlorraine.ecandidat.views.ScolFormulaireView;
import fr.univlorraine.ecandidat.views.ScolGestCandidatDroitProfilView;
import fr.univlorraine.ecandidat.views.ScolMailModelView;
import fr.univlorraine.ecandidat.views.ScolMailTypeDecView;
import fr.univlorraine.ecandidat.views.ScolMessageView;
import fr.univlorraine.ecandidat.views.ScolMotivAvisView;
import fr.univlorraine.ecandidat.views.ScolParametreView;
import fr.univlorraine.ecandidat.views.ScolPieceJustifView;
import fr.univlorraine.ecandidat.views.ScolTagView;
import fr.univlorraine.ecandidat.views.ScolTypeDecisionView;
import fr.univlorraine.ecandidat.views.ScolTypeStatutPieceView;
import fr.univlorraine.ecandidat.views.ScolTypeStatutView;
import fr.univlorraine.ecandidat.views.ScolTypeTraitementView;
import fr.univlorraine.ecandidat.views.windows.SearchCandidatWindow;
import fr.univlorraine.ecandidat.views.windows.SearchCommissionWindow;
import fr.univlorraine.ecandidat.views.windows.SearchCtrCandWindow;
import fr.univlorraine.tools.vaadin.IAnalyticsTracker;
import fr.univlorraine.tools.vaadin.LogAnalyticsTracker;
import fr.univlorraine.tools.vaadin.PiwikAnalyticsTracker;
import lombok.Getter;
/**
* UI principale
* @author <NAME>
*/
@SuppressWarnings("serial")
@Theme("valo-ul")
@SpringUI(path = "/*")
@Push(value = PushMode.AUTOMATIC)
public class MainUI extends UI {
/** Nombre maximum de tentatives de reconnexion lors d'une déconnexion. */
private static final int TENTATIVES_RECO = 3;
/* Redirige java.util.logging vers SLF4j */
static {
SLF4JBridgeHandler.install();
}
private final Logger logger = LoggerFactory.getLogger(MainUI.class);
/* Injections */
@Resource
private transient ApplicationContext applicationContext;
@Resource
private transient UserController userController;
@Resource
private transient I18nController i18nController;
@Resource
private transient CandidatController candidatController;
@Resource
private transient TableRefController tableRefController;
@Resource
private transient LoadBalancingController loadBalancingController;
@Resource
private transient UiController uiController;
@Resource
private transient ParametreController parametreController;
@Resource
private transient AlertSvaController alertSvaController;
@Resource
private transient TagController tagController;
@Resource
private transient LockCandidatController lockCandidatController;
/* Propriétés */
@Value("${app.name}")
private String appName;
@Value("${app.version}")
private String appVersion;
@Value("${demoMode}")
private String demoMode;
@Value("${piwikAnalytics.trackerUrl:}")
private transient String piwikAnalyticsTrackerUrl;
@Value("${piwikAnalytics.siteId:}")
private transient String piwikAnalyticsSiteId;
@Value("${pushTransportMode:}")
private transient String pushTransportMode;
@Value("${sessionTimeOut:}")
private transient String sessionTimeOut;
/* Composants */
private final CssLayout menu = new CssLayout();
private final CssLayout menuLayout = new CssLayout(menu);
private final CssLayout menuButtonLayout = new CssLayout();
private final CssLayout contentLayout = new CssLayout();
private final CssLayout layoutWithSheet = new CssLayout();
private final HorizontalLayout layout = new HorizontalLayout(menuLayout, layoutWithSheet);
private final SubMenuBar subBarMenu = new SubMenuBar();
private OneClickButton lastButtonView;
private AccordionMenu accordionMenu;
private AccordionItemMenu itemMenuCtrCand;
private AccordionItemMenu itemMenuCommission;
private AccordionItemMenu itemMenuGestCandidat;
private OneClickButton changeCandBtn;
private OneClickButton createCandBtn;
/** The view provider. */
@Resource
private SpringViewProvider viewProvider;
@Getter
private IAnalyticsTracker analyticsTracker;
/** Gestionnaire de vues */
private final ReloadViewNavigator navigator = new ReloadViewNavigator(this, contentLayout);
/** Nom de la dernière vue visitée */
private String currentViewName = null;
/** Noms des vues et boutons du menu associés */
private final Map<String, Menu> viewButtons = new HashMap<>();
/** Noms des vues et numéro accordeon associé */
private final Map<String, String> viewAccordion = new HashMap<>();
/** Noms des vues et numéro accordeon associé */
private final Map<String, String> viewAccordionCtrCand = new HashMap<>();
/** Noms des vues et numéro accordeon associé */
private final Map<String, String> viewAccordionCommission = new HashMap<>();
/** Noms des vues et numéro accordeon associé */
private final Map<String, String> viewAccordionGestCandidat = new HashMap<>();
/** Temoin permettant de savoir si on a déjà ajouté les alertes SVA : à n'ajouter qu'une fois!! */
private Boolean isSvaAlertDisplay = false;
/** Les infos en cours d'edition */
private Integer idCtrCandEnCours = null;
private Integer idCommissionEnCours = null;
private String noDossierCandidatEnCours = null;
/** ID de l'UI pour les locks */
private String uiId = null;
private static final String SELECTED_ITEM = "selected";
/**
 * @see com.vaadin.ui.UI#getCurrent()
 * @return the current MainUI
 */
public static MainUI getCurrent() {
	return (MainUI) UI.getCurrent();
}
/** @see com.vaadin.ui.UI#init(com.vaadin.server.VaadinRequest) */
@Override
protected void init(final VaadinRequest vaadinRequest) {
	/* Configure the session timeout */
	configTimeOut();
	/* Configure server push */
	configPush();
	/* Log unhandled errors */
	configError();
	configUiId();
	configReconnectDialog();
	/* Show the application name in the browser tab */
	getPage().setTitle(appName);
	initLayout();
	initNavigator();
	initAnalyticsTracker();
	initLanguage();
	buildTitle();
	buildMenu();
	/* Register this UI so it can receive notifications */
	uiController.registerUI(this);
}
/**
 * Configures global error handling: known/benign failures (detached UIs,
 * aborted uploads, closed sockets...) only show a toast, access violations
 * redirect to the error view, and everything else is logged in full.
 */
private void configError() {
	/* Log unhandled errors */
	VaadinSession.getCurrent().setErrorHandler(e -> {
		Throwable cause = e.getThrowable();
		/* Walk the cause chain until a known case matches or it is exhausted */
		while (cause instanceof Throwable) {
			/* Handle unauthorized access */
			if (cause instanceof AccessDeniedException) {
				navigateToView(ErreurView.NAME);
				return;
			}
			/* Handle detached UIs of disconnected users and transport-level failures */
			if (cause instanceof AuthenticationCredentialsNotFoundException || cause instanceof UIDetachedException
					|| cause instanceof UploadException
					|| cause instanceof IllegalStateException
					|| cause instanceof SocketTimeoutException
					|| MethodUtils.checkCause(cause, "SocketTimeoutException")
					|| MethodUtils.checkCause(cause, "ClientAbortException")
					|| cause instanceof EOFException
					|| cause instanceof URISyntaxException
					|| cause instanceof UIException) {
				sendError();
				return;
			}
			/* Known noisy stack traces (uploads, PDF opener, push transport, sockets):
			 * toast only, no log */
			if (MethodUtils.checkCauseByStackTrace(cause, "FileUploadHandler", 0) || MethodUtils.checkCauseByStackTrace(cause, "OnDemandPdfBrowserOpener", 1)
					|| MethodUtils.checkCauseByStackTrace(cause, "DownloadStream", 3)
					|| MethodUtils.checkCauseByStackTrace(cause, "AtmosphereRequest", 7)
					|| MethodUtils.checkCauseByStackTrace(cause, "AbstractTextField", 0)
					|| MethodUtils.checkCauseByStackTrace(cause, "SocketChannelImpl", 4)
					|| (cause instanceof CmisRuntimeException && MethodUtils.checkCauseByMessage(cause, "Bad Gateway"))
					|| MethodUtils.checkCauseEmpty(cause)) {
				sendError();
				return;
			}
			cause = cause.getCause();
		}
		/* Unknown error: notify the user and log the full stack trace */
		sendError();
		logger.error("Erreur inconnue", e.getThrowable());
	});
}
/** Shows a generic error notification when a Page is attached; any failure is swallowed. */
private void sendError() {
	try {
		final Page page = Page.getCurrent();
		if (page == null) {
			return;
		}
		Notification.show("Une erreur est survenue");
	} catch (final Exception ignored) {
		/* Best effort only: error reporting must never fail itself. */
	}
}
/**
 * Configures the HTTP session timeout from the {@code sessionTimeOut} property.
 * A missing or malformed property leaves the container default untouched.
 */
private void configTimeOut() {
	if (getSession() != null && getSession().getSession() != null && sessionTimeOut != null) {
		try {
			/* parseInt avoids the needless Integer boxing of Integer.valueOf
			 * before auto-unboxing into the int parameter. */
			getSession().getSession().setMaxInactiveInterval(Integer.parseInt(sessionTimeOut));
		} catch (final Exception e) {
			/* Deliberate best-effort: keep the container's default timeout. */
		}
	}
}
/**
 * Configures the push transport from the {@code pushTransportMode} property.
 * Falls back to WEBSOCKET when the property is null or unrecognized.
 */
private void configPush() {
	Transport transport = Transport.WEBSOCKET;
	if (Transport.LONG_POLLING.getIdentifier().equals(pushTransportMode)) {
		transport = Transport.LONG_POLLING;
	} else if (Transport.WEBSOCKET_XHR.getIdentifier().equals(pushTransportMode)) {
		transport = Transport.WEBSOCKET_XHR;
	}
	getPushConfiguration().setTransport(transport);
}
/** Builds this UI's identifier ("sessionId-uiId"), used for candidate locks. */
private void configUiId() {
	/* Guard clause: without a session id no identifier can be built. */
	if (getSession() == null || getSession().getSession() == null || getSession().getSession().getId() == null) {
		return;
	}
	uiId = getSession().getSession().getId() + "-" + getUIId();
}
/** @return the identifier of this UI ("sessionId-uiId"), or null if it could not be built */
public String getUiId() {
	return uiId;
}
/** Initializes the UI language */
private void initLanguage() {
	// NOTE(review): the boolean presumably means "do not reload candidate language"
	// — confirm against I18nController.initLanguageUI.
	i18nController.initLanguageUI(false);
}
/** Initializes the main layout: menu column on the left, content area on the right */
private void initLayout() {
	layout.setSizeFull();
	setContent(layout);

	menuLayout.setPrimaryStyleName(ValoTheme.MENU_ROOT);

	layoutWithSheet.setPrimaryStyleName(StyleConstants.VALO_CONTENT);
	layoutWithSheet.addStyleName(StyleConstants.SCROLLABLE);
	layoutWithSheet.setSizeFull();

	final VerticalLayout vlAll = new VerticalLayout();
	vlAll.addStyleName(StyleConstants.SCROLLABLE);
	vlAll.setSizeFull();

	/* Sub-menu tab bar above the content, hidden until a view declares tabs */
	subBarMenu.addStyleName(ValoTheme.TABSHEET_PADDED_TABBAR);
	subBarMenu.setVisible(false);
	vlAll.addComponent(subBarMenu);

	contentLayout.addStyleName(StyleConstants.SCROLLABLE);
	contentLayout.setSizeFull();
	vlAll.addComponent(contentLayout);
	/* Content takes all remaining vertical space below the tab bar */
	vlAll.setExpandRatio(contentLayout, 1);
	layoutWithSheet.addComponent(vlAll);

	menuButtonLayout.addStyleName(StyleConstants.VALO_MY_MENU_MAX_WIDTH);

	layout.setExpandRatio(layoutWithSheet, 1);
	Responsive.makeResponsive(this);
	addStyleName(ValoTheme.UI_WITH_MENU);
}
/** Navigates to the given view; navigating to the current view forces a reload */
public void navigateToView(final String name) {
	/* The navigator ignores same-name navigation, so mark the view for reload first */
	if (name.equals(currentViewName)) {
		navigator.changeCurrentView();
	}
	navigator.navigateTo(name);
}
/** Navigates back to the home view */
public void navigateToAccueilView() {
	navigateToView(AccueilView.NAME);
}
/** Builds the application title button (logo + name); the tooltip shows the version */
private void buildTitle() {
	final OneClickButton itemBtn = new OneClickButton(appName, new ThemeResource("logo.png"));
	String demo = "";
	if (demoMode != null && Boolean.valueOf(demoMode)) {
		demo = " - Version Demo";
	}
	itemBtn.setDescription(appVersion + demo);
	itemBtn.setPrimaryStyleName(ValoTheme.MENU_TITLE);
	itemBtn.addStyleName(ValoTheme.MENU_ITEM);
	/* Clicking the title returns to the home view */
	itemBtn.addClickListener(e -> getNavigator().navigateTo(AccueilView.NAME));
	menu.addComponent(itemBtn);
}
/** Builds the menu column with its responsive show/hide toggle */
private void buildMenu() {
	menu.addStyleName(ValoTheme.MENU_PART);

	/* Toggle button shown on small screens to expand/collapse the menu */
	final OneClickButton showMenu = new OneClickButton(applicationContext.getMessage("mainUi.menu", null, getLocale()), FontAwesome.LIST);
	showMenu.addStyleName(ValoTheme.BUTTON_PRIMARY);
	showMenu.addStyleName(ValoTheme.BUTTON_SMALL);
	showMenu.addStyleName(StyleConstants.VALO_MENU_TOGGLE);
	showMenu.addClickListener(e -> {
		if (menu.getStyleName().contains(StyleConstants.VALO_MENU_VISIBLE)) {
			menu.removeStyleName(StyleConstants.VALO_MENU_VISIBLE);
		} else {
			menu.addStyleName(StyleConstants.VALO_MENU_VISIBLE);
		}
	});
	menu.addComponent(showMenu);

	menuButtonLayout.setPrimaryStyleName(StyleConstants.VALO_MENUITEMS);
	menu.addComponent(menuButtonLayout);
	constructMainMenu();
}
/** Rebuilds the menu after a login/logout, then returns to the home view */
public void reconstructMainMenu() {
	constructMainMenu();
	navigateToAccueilView();
}
/** Builds every menu button, according to the current user's rights and the load-balancing mode. */
public void constructMainMenu() {
	/* Retrieve the authentication */
	final Authentication auth = userController.getCurrentAuthentication();
	menuButtonLayout.removeAllComponents();
	/* Sub-title: the user name */
	final Label usernameLabel = new Label(userController.getCurrentUserName(auth));
	usernameLabel.setPrimaryStyleName(ValoTheme.MENU_SUBTITLE);
	usernameLabel.setSizeUndefined();
	menuButtonLayout.addComponent(usernameLabel);
	/* Home: anonymous users (outside the gestionnaire load-balancing mode) get a sub-menu including account creation */
	if (userController.isAnonymous(auth) && !loadBalancingController.isLoadBalancingGestionnaireMode()) {
		final LinkedList<SubMenu> subMenuAccueil = new LinkedList<>();
		subMenuAccueil.add(new SubMenu(AccueilView.NAME, FontAwesome.POWER_OFF));
		subMenuAccueil.add(new SubMenu(CandidatCreerCompteView.NAME, FontAwesome.MAGIC));
		addItemMenu(applicationContext.getMessage("main.menu.accueil.title", null, getLocale()), null, FontAwesome.HOME, subMenuAccueil, null);
	} else {
		addItemMenu(applicationContext.getMessage("main.menu.accueil.title", null, getLocale()), AccueilView.NAME, FontAwesome.HOME, null, null);
	}
	/* Assistance */
	addItemMenu(applicationContext.getMessage(AssistanceView.NAME + ".title", null, getLocale()), AssistanceView.NAME, FontAwesome.AMBULANCE, null, null);
	/* Course offer (the original comment said "Accueil" — copy/paste leftover) */
	addItemMenu(applicationContext.getMessage(OffreFormationView.NAME + ".title", null, getLocale()), OffreFormationView.NAME, FontAwesome.BOOKMARK, null, null);
	/* Connect button (anonymous) / disconnect button (authenticated) */
	if (userController.isAnonymous(auth)) {
		final OneClickButton itemBtn = new OneClickButton(applicationContext.getMessage("btnConnect", null, getLocale()), FontAwesome.POWER_OFF);
		itemBtn.addClickListener(e -> userController.connectCAS());
		itemBtn.setPrimaryStyleName(ValoTheme.MENU_ITEM);
		menuButtonLayout.addComponent(itemBtn);
	} else {
		// NOTE(review): uses getCurrent().getLocale() while every sibling call uses getLocale() — presumably equivalent; confirm
		final OneClickButton itemBtn = new OneClickButton(applicationContext.getMessage("btnDisconnect", null, getCurrent().getLocale()), FontAwesome.POWER_OFF);
		itemBtn.addClickListener(e -> {
			userController.deconnect();
		});
		itemBtn.setPrimaryStyleName(ValoTheme.MENU_ITEM);
		menuButtonLayout.addComponent(itemBtn);
	}
	/* Button restoring the original user after a role switch */
	if (userController.isUserSwitched()) {
		final OneClickButton btnSwitchUserBack = new OneClickButton(applicationContext.getMessage("admin.switchUser.btnSwitchUserBack", null, getLocale()), FontAwesome.UNDO);
		btnSwitchUserBack.setPrimaryStyleName(ValoTheme.MENU_ITEM);
		btnSwitchUserBack.addClickListener(e -> userController.switchBackToPreviousUser());
		menuButtonLayout.addComponent(btnSwitchUserBack);
	}
	accordionMenu = new AccordionMenu();
	menuButtonLayout.addComponent(accordionMenu);
	final Boolean isCandidatMode = loadBalancingController.isLoadBalancingCandidatMode();
	if (!isCandidatMode) {
		/* Button towards the Admin view */
		if (userController.canCurrentUserAccessView(AdminView.class, auth)) {
			final AccordionItemMenu itemMenuAdmin = new AccordionItemMenu(applicationContext.getMessage("admin.mainmenu", null, getLocale()), accordionMenu);
			accordionMenu.addItemMenu(itemMenuAdmin, ConstanteUtils.UI_MENU_ADMIN);
			final LinkedList<SubMenu> subMenuParametrage = new LinkedList<>();
			subMenuParametrage.add(new SubMenu(AdminParametreView.NAME, FontAwesome.COGS));
			subMenuParametrage.add(new SubMenu(AdminLangueView.NAME, FontAwesome.FLAG));
			subMenuParametrage.add(new SubMenu(AdminVersionView.NAME, FontAwesome.COG));
			subMenuParametrage.add(new SubMenu(AdminCacheView.NAME, FontAwesome.DATABASE));
			addItemMenu(applicationContext.getMessage(AdminParametreView.NAME + ".title", null, getLocale()), null, FontAwesome.COGS, subMenuParametrage, itemMenuAdmin);
			addItemMenu(applicationContext.getMessage(AdminBatchView.NAME + ".title", null, getLocale()), AdminBatchView.NAME, FontAwesome.ROCKET, null, itemMenuAdmin);
			final LinkedList<SubMenu> subMenuSession = new LinkedList<>();
			subMenuSession.add(new SubMenu(AdminView.NAME, FontAwesome.WRENCH));
			subMenuSession.add(new SubMenu(AdminLockCandidatView.NAME, FontAwesome.LOCK));
			addItemMenu(applicationContext.getMessage(AdminView.NAME + ".title", null, getLocale()), null, FontAwesome.WRENCH, subMenuSession, itemMenuAdmin);
			addItemMenu(applicationContext.getMessage(ScolCampagneView.NAME + ".title", null, getLocale()), ScolCampagneView.NAME, FontAwesome.STAR, null, itemMenuAdmin);
			addItemMenu(applicationContext.getMessage(AdminDroitProfilIndView.NAME + ".title", null, getLocale()), AdminDroitProfilIndView.NAME, FontAwesome.SHIELD, null, itemMenuAdmin);
			/* OPI menu, only when OPI (or OPI attachments) is enabled */
			if (parametreController.getIsUtiliseOpi() || parametreController.getIsUtiliseOpiPJ()) {
				addItemMenu(applicationContext.getMessage(AdminOpiView.NAME + ".title", null, getLocale()), AdminOpiView.NAME, FontAwesome.GRADUATION_CAP, null, itemMenuAdmin);
			}
		}
		/* Button towards the central scolarité view */
		if (userController.canCurrentUserAccessView(ScolMailModelView.class, auth)) {
			/* Master "Scol" menu */
			final AccordionItemMenu itemMenuScol = new AccordionItemMenu(applicationContext.getMessage("scolcentrale.mainmenu", null, getLocale()), accordionMenu);
			accordionMenu.addItemMenu(itemMenuScol, ConstanteUtils.UI_MENU_SCOL);
			/* Parameters menu */
			addItemMenu(applicationContext.getMessage(ScolParametreView.NAME + ".title", null, getLocale()), ScolParametreView.NAME, FontAwesome.COGS, null, itemMenuScol);
			/* Rights/profile menu */
			final LinkedList<SubMenu> subMenuDroits = new LinkedList<>();
			subMenuDroits.add(new SubMenu(ScolDroitProfilView.NAME, FontAwesome.USER));
			subMenuDroits.add(new SubMenu(ScolGestCandidatDroitProfilView.NAME, FontAwesome.USERS));
			addItemMenu(applicationContext.getMessage("scolDroitProfilMenu.title", null, getLocale()), null, FontAwesome.USER, subMenuDroits, itemMenuScol);
			/* Mail templates menu */
			addItemMenu(applicationContext.getMessage(ScolMailModelView.NAME + ".title", null, getLocale()), ScolMailModelView.NAME, FontAwesome.ENVELOPE_O, null, itemMenuScol);
			/* Decision configuration */
			final LinkedList<SubMenu> subMenuParamDecision = new LinkedList<>();
			subMenuParamDecision.add(new SubMenu(ScolMailTypeDecView.NAME, FontAwesome.ENVELOPE));
			subMenuParamDecision.add(new SubMenu(ScolTypeDecisionView.NAME, FontAwesome.GAVEL));
			subMenuParamDecision.add(new SubMenu(ScolMotivAvisView.NAME, FontAwesome.BALANCE_SCALE));
			addItemMenu(applicationContext.getMessage("paramDecision.menus.title", null, getLocale()), null, FontAwesome.GAVEL, subMenuParamDecision, itemMenuScol);
			/* Candidature-centre menu */
			addItemMenu(applicationContext.getMessage(ScolCentreCandidatureView.NAME + ".title", null, getLocale()), ScolCentreCandidatureView.NAME, FontAwesome.BANK, null, itemMenuScol);
			/* Supporting-documents menu */
			addItemMenu(applicationContext.getMessage(ScolPieceJustifView.NAME + ".title", null, getLocale()), ScolPieceJustifView.NAME, FontAwesome.FILE_TEXT_O, null, itemMenuScol);
			/* Forms menu */
			addItemMenu(applicationContext.getMessage(ScolFormulaireView.NAME + ".title", null, getLocale()), ScolFormulaireView.NAME, FontAwesome.FILE_ZIP_O, null, itemMenuScol);
			/* Alerts menu */
			final LinkedList<SubMenu> subMenuAlert = new LinkedList<>();
			subMenuAlert.add(new SubMenu(ScolAlertSvaView.NAME, FontAwesome.BELL));
			subMenuAlert.add(new SubMenu(ScolTagView.NAME, FontAwesome.TAGS));
			addItemMenu(applicationContext.getMessage("scolAlert.title", null, getLocale()), null, FontAwesome.BELL, subMenuAlert, itemMenuScol);
			/* Messages menu */
			addItemMenu(applicationContext.getMessage(ScolMessageView.NAME + ".title", null, getLocale()), ScolMessageView.NAME, FontAwesome.ENVELOPE, null, itemMenuScol);
			/* Nomenclatures menu */
			final LinkedList<SubMenu> subMenuTypDec = new LinkedList<>();
			subMenuTypDec.add(new SubMenu(ScolTypeTraitementView.NAME, FontAwesome.BATTERY_QUARTER));
			subMenuTypDec.add(new SubMenu(ScolTypeStatutView.NAME, FontAwesome.BATTERY_HALF));
			subMenuTypDec.add(new SubMenu(ScolTypeStatutPieceView.NAME, FontAwesome.BATTERY_THREE_QUARTERS));
			subMenuTypDec.add(new SubMenu(ScolFaqView.NAME, FontAwesome.QUESTION_CIRCLE));
			addItemMenu(applicationContext.getMessage("scolNomenclature.title", null, getLocale()), null, FontAwesome.BATTERY_FULL, subMenuTypDec, itemMenuScol);
			/* To enable the global stats, uncomment below. Disabled because the queries are too heavy. */
			// addItemMenu(applicationContext.getMessage("stat.menu.title", null, getLocale()), ScolStatView.NAME, FontAwesome.LINE_CHART, null, itemMenuScol);
		}
		/* Button towards the candidature-centre view */
		if (userController.canCurrentUserAccessView(CtrCandParametreView.class, auth)) {
			itemMenuCtrCand = new AccordionItemMenu(applicationContext.getMessage("ctrcand.mainmenu", null, getLocale()), accordionMenu);
			accordionMenu.addItemMenu(itemMenuCtrCand, ConstanteUtils.UI_MENU_CTR);
			buildMenuCtrCand(auth);
		}
		/* Button towards the commission view */
		if (userController.canCurrentUserAccessView(CommissionCandidatureView.class, auth)) {
			itemMenuCommission = new AccordionItemMenu(applicationContext.getMessage("commission.mainmenu", null, getLocale()), accordionMenu);
			accordionMenu.addItemMenu(itemMenuCommission, ConstanteUtils.UI_MENU_COMM);
			buildMenuCommission(auth);
		}
		/* Button towards the candidate-management view */
		final Boolean isGestionnaireCandidat = userController.isGestionnaireCandidat(auth);
		if (isGestionnaireCandidat || userController.isGestionnaireCandidatLS(auth)) {
			itemMenuGestCandidat = new AccordionItemMenu(applicationContext.getMessage("gestcand.mainmenu", null, getLocale()), accordionMenu);
			accordionMenu.addItemMenu(itemMenuGestCandidat, ConstanteUtils.UI_MENU_GEST_CAND);
			/* Full (non-LS) managers may create candidate accounts */
			if (isGestionnaireCandidat) {
				createCandBtn = new OneClickButton(applicationContext.getMessage("btn.create.candidat", null, getLocale()), FontAwesome.PENCIL);
				createCandBtn.setDescription(applicationContext.getMessage("btn.create.candidat", null, getLocale()));
				createCandBtn.setPrimaryStyleName(ValoTheme.MENU_ITEM);
				createCandBtn.addClickListener(e -> {
					candidatController.createCompteMinima(true);
				});
				itemMenuGestCandidat.addButton(createCandBtn);
			}
			/* Candidate switch: search window selecting another candidate account */
			changeCandBtn = new OneClickButton(applicationContext.getMessage("btn.find.candidat", null, getLocale()));
			changeCandBtn.setDescription(applicationContext.getMessage("btn.find.candidat", null, getLocale()));
			changeCandBtn.setIcon(FontAwesome.SEARCH);
			changeCandBtn.setPrimaryStyleName(ValoTheme.MENU_ITEM);
			changeCandBtn.addClickListener(e -> {
				final SearchCandidatWindow win = new SearchCandidatWindow();
				win.addCompteMinimaListener(compteMinima -> {
					if (compteMinima != null) {
						noDossierCandidatEnCours = compteMinima.getNumDossierOpiCptMin();
						userController.setNoDossierNomCandidat(compteMinima);
						buildMenuGestCand(false);
					}
				});
				getCurrent().addWindow(win);
			});
			itemMenuGestCandidat.addButton(changeCandBtn);
			buildMenuGestCand(false, auth);
		}
	} else {
		/* Candidate load-balancing mode: admin access only */
		if (userController.canCurrentUserAccessView(AdminView.class, auth)) {
			final AccordionItemMenu itemMenuAdmin = new AccordionItemMenu(applicationContext.getMessage("admin.mainmenu", null, getLocale()), accordionMenu);
			accordionMenu.addItemMenu(itemMenuAdmin, ConstanteUtils.UI_MENU_ADMIN);
			addItemMenu(applicationContext.getMessage(AdminVersionView.NAME + ".title", null, getLocale()), AdminVersionView.NAME, FontAwesome.COG, null, itemMenuAdmin);
			addItemMenu(applicationContext.getMessage(AdminView.NAME + ".title", null, getLocale()), AdminView.NAME, FontAwesome.WRENCH, null, itemMenuAdmin);
		}
	}
	accordionMenu.selectFirst();
	/* Candidature management for a connected, valid candidate */
	if (userController.canCurrentUserAccessView(CandidatInfoPersoView.class, auth) && userController.isCandidatValid(auth)) {
		final AccordionItemMenu itemMenuCandidat = new AccordionItemMenu(applicationContext.getMessage("compte.main.menu", null, getLocale()), accordionMenu, false);
		accordionMenu.addItemMenu(itemMenuCandidat, ConstanteUtils.UI_MENU_CAND);
		buildMenuCandidat(itemMenuCandidat);
	}
	/* Restore focus/selection on the current view after the rebuild */
	focusCurrentMenu(currentViewName);
	focusCurrentAccordion(currentViewName);
	reloadSubMenuBar();
}
/**
 * Checks that the candidate currently being edited matches the menus.
 * @param noDossierCandidat the candidate file number to check
 * @return true when consistent, false otherwise (the candidate menu is rebuilt)
 */
public Boolean checkConcordanceCandidat(final String noDossierCandidat) {
	final boolean mismatch = noDossierCandidatEnCours != null
			&& noDossierCandidat != null
			&& !noDossierCandidatEnCours.equals(noDossierCandidat);
	if (!mismatch) {
		return true;
	}
	Notification.show(applicationContext.getMessage("cptMin.change.error", null, getLocale()));
	buildMenuGestCand(true);
	return false;
}
/**
 * Builds the candidate menu (personal info, address, cursus, candidatures...).
 * Also used by candidate managers: in that case every view is registered in
 * viewAccordionGestCandidat so it can be unregistered on candidate change.
 * @param itemMenu
 *            the candidate's accordion item menu
 */
private void buildMenuCandidat(final AccordionItemMenu itemMenu) {
	final Boolean getCursusInterne = parametreController.getIsGetCursusInterne();
	addItemMenu(applicationContext.getMessage("candidatInfoPersoView.title.short", null, getLocale()), CandidatInfoPersoView.NAME, FontAwesome.PENCIL, null, itemMenu);
	addItemMenu(applicationContext.getMessage(CandidatAdresseView.NAME + ".title", null, getLocale()), CandidatAdresseView.NAME, FontAwesome.HOME, null, itemMenu);
	addItemMenu(applicationContext.getMessage(CandidatBacView.NAME + ".title", null, getLocale()), CandidatBacView.NAME, FontAwesome.BOOK, null, itemMenu);
	/* The external-cursus caption changes depending on whether the internal cursus is displayed */
	String txtCursusExterne;
	if (getCursusInterne) {
		addItemMenu(applicationContext.getMessage(CandidatCursusInterneView.NAME + ".title", null, getLocale()), CandidatCursusInterneView.NAME, FontAwesome.UNIVERSITY, null, itemMenu);
		txtCursusExterne = applicationContext.getMessage(CandidatCursusExterneView.NAME + ".title", null, getLocale());
	} else {
		txtCursusExterne = applicationContext.getMessage(CandidatCursusExterneView.NAME + ".title.withoutCursusInterne", null, getLocale());
	}
	addItemMenu(txtCursusExterne, CandidatCursusExterneView.NAME, FontAwesome.GRADUATION_CAP, null, itemMenu);
	addItemMenu(applicationContext.getMessage(CandidatStageView.NAME + ".title", null, getLocale()), CandidatStageView.NAME, FontAwesome.CUBE, null, itemMenu);
	addItemMenu(applicationContext.getMessage("candidatFormationProView.title.short", null, getLocale()), CandidatFormationProView.NAME, FontAwesome.CUBES, null, itemMenu);
	addItemMenu(applicationContext.getMessage("main.menu.candidature.title", null, getLocale()), CandidatCandidaturesView.NAME, FontAwesome.ASTERISK, null, itemMenu);
	/* Retrieve the authentication */
	final Authentication auth = userController.getCurrentAuthentication();
	final Boolean isGestionnaireCandidat = userController.isGestionnaireCandidat(auth);
	/* Candidate managers get the extra admin view and all views registered for later cleanup */
	if (isGestionnaireCandidat || userController.isGestionnaireCandidatLS(auth)) {
		if (isGestionnaireCandidat) {
			addItemMenu(applicationContext.getMessage("gestcand.adminmenu", null, getLocale()), CandidatAdminView.NAME, FontAwesome.FLASH, null, itemMenu);
			viewAccordionGestCandidat.put(CandidatAdminView.NAME, (String) itemMenu.getData());
		}
		viewAccordionGestCandidat.put(CandidatInfoPersoView.NAME, (String) itemMenu.getData());
		viewAccordionGestCandidat.put(CandidatAdresseView.NAME, (String) itemMenu.getData());
		viewAccordionGestCandidat.put(CandidatBacView.NAME, (String) itemMenu.getData());
		if (getCursusInterne) {
			viewAccordionGestCandidat.put(CandidatCursusInterneView.NAME, (String) itemMenu.getData());
		}
		viewAccordionGestCandidat.put(CandidatCursusExterneView.NAME, (String) itemMenu.getData());
		viewAccordionGestCandidat.put(CandidatStageView.NAME, (String) itemMenu.getData());
		viewAccordionGestCandidat.put(CandidatFormationProView.NAME, (String) itemMenu.getData());
		viewAccordionGestCandidat.put(CandidatCandidaturesView.NAME, (String) itemMenu.getData());
	}
}
/**
 * Builds the candidate-management menu using the current authentication.
 * @param reloadConcordance
 *            true when this is a reload caused by a candidate concordance mismatch
 */
public void buildMenuGestCand(final Boolean reloadConcordance) {
	buildMenuGestCand(reloadConcordance, userController.getCurrentAuthentication());
}
/**
 * Builds the candidate-management menu for the candidate stored in session.
 * @param reloadConcordance
 *            true when this is a reload caused by a candidate concordance mismatch
 *            (skips the automatic navigation to the personal-info view)
 * @param auth
 *            the current authentication
 */
private void buildMenuGestCand(final Boolean reloadConcordance, final Authentication auth) {
	final UserDetails details = userController.getCurrentUser(auth);
	final String noDossier = userController.getNoDossierCandidat(details);
	String name = userController.getDisplayNameCandidat(details);
	/* Fall back to the file number when no display name is available */
	if (name == null || name.equals("")) {
		name = noDossier;
	}
	if (name != null && !name.equals("")) {
		/* A candidate is selected: show it on the search button and build the candidate entries */
		noDossierCandidatEnCours = noDossier;
		changeCandBtn.setCaption(name);
		changeCandBtn.setIcon(null);
		/* Only build the candidate entries once (<= 2 buttons means only create/search are present) */
		if (itemMenuGestCandidat.getNbButton() <= 2) {
			buildMenuCandidat(itemMenuGestCandidat);
		}
		if (!reloadConcordance) {
			navigateToView(CandidatInfoPersoView.NAME);
		}
	} else {
		/* No candidate selected: reset the menu, unregister the candidate views and go home */
		itemMenuGestCandidat.removeAllButtons(changeCandBtn, createCandBtn);
		viewAccordionGestCandidat.forEach((key, value) -> {
			viewButtons.remove(key);
			viewAccordion.remove(key);
		});
		viewAccordionGestCandidat.clear();
		changeCandBtn.setCaption(applicationContext.getMessage("btn.find.candidat", null, getLocale()));
		changeCandBtn.setIcon(FontAwesome.SEARCH);
		changeCandBtn.setVisible(true);
		navigateToView(AccueilView.NAME);
	}
}
/** Builds the candidature-centre menu using the current authentication. */
public void buildMenuCtrCand() {
	buildMenuCtrCand(userController.getCurrentAuthentication());
}
/**
 * Builds the candidature-centre menu for the centre stored in session,
 * filtering each entry by the user's functionality rights.
 * @param auth the current authentication
 */
private void buildMenuCtrCand(final Authentication auth) {
	/* Drop the view/button registrations added for a previous centre */
	itemMenuCtrCand.removeAllButtons();
	viewAccordionCtrCand.forEach((key, value) -> {
		viewButtons.remove(key);
		viewAccordion.remove(key);
	});
	viewAccordionCtrCand.clear();
	final SecurityCentreCandidature centreCandidature = userController.getCentreCandidature(auth);
	if (centreCandidature != null) {
		idCtrCandEnCours = centreCandidature.getIdCtrCand();
		/* Button showing the current centre; clicking it opens the centre search window */
		final OneClickButton ctrCandBtn = constructCtrCandChangeBtn(centreCandidature.getLibCtrCand());
		ctrCandBtn.setDescription(applicationContext.getMessage("ctrCand.window.change", new Object[] { centreCandidature.getLibCtrCand() }, getLocale()));
		itemMenuCtrCand.addButton(ctrCandBtn);
		final Boolean isScolCentrale = userController.isScolCentrale(auth);
		final List<DroitProfilFonc> listFonctionnalite = centreCandidature.getListFonctionnalite();
		/* Parameters */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_PARAM)) {
			final LinkedList<SubMenu> subMenuParam = new LinkedList<>();
			subMenuParam.add(new SubMenu(CtrCandParametreView.NAME, FontAwesome.COG));
			subMenuParam.add(new SubMenu(CtrCandTagView.NAME, FontAwesome.TAGS));
			// NOTE(review): viewName is CtrCandStatFormView.NAME although the submenus are the param views;
			// harmless because addItemMenu ignores viewName when submenus are present, but looks like a
			// copy/paste from the stats block below — consider CtrCandParametreView.NAME.
			addItemMenu(applicationContext.getMessage("param.menus.title", null, getLocale()), CtrCandStatFormView.NAME, FontAwesome.COGS, subMenuParam, itemMenuCtrCand);
			viewAccordionCtrCand.put(CtrCandParametreView.NAME, (String) itemMenuCtrCand.getData());
			viewAccordionCtrCand.put(CtrCandTagView.NAME, (String) itemMenuCtrCand.getData());
		}
		/* Statistics */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_STATS)) {
			final LinkedList<SubMenu> subMenuStats = new LinkedList<>();
			subMenuStats.add(new SubMenu(CtrCandStatFormView.NAME, FontAwesome.BAR_CHART));
			subMenuStats.add(new SubMenu(CtrCandStatCommView.NAME, FontAwesome.PIE_CHART));
			addItemMenu(applicationContext.getMessage("stat.menus.title", null, getLocale()), CtrCandStatFormView.NAME, FontAwesome.LINE_CHART, subMenuStats, itemMenuCtrCand);
			viewAccordionCtrCand.put(CtrCandStatFormView.NAME, (String) itemMenuCtrCand.getData());
			viewAccordionCtrCand.put(CtrCandStatCommView.NAME, (String) itemMenuCtrCand.getData());
		}
		/* Centre-level decision configuration (also requires the centre's param menu to be open) */
		if (userController.isMenuParamCCOpen(idCtrCandEnCours) && hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_GEST_PARAM_CC)) {
			final LinkedList<SubMenu> subMenuParamCC = new LinkedList<>();
			subMenuParamCC.add(new SubMenu(CtrCandMailTypeDecView.NAME, FontAwesome.ENVELOPE));
			subMenuParamCC.add(new SubMenu(CtrCandTypeDecisionView.NAME, FontAwesome.GAVEL));
			subMenuParamCC.add(new SubMenu(CtrCandMotivAvisView.NAME, FontAwesome.BALANCE_SCALE));
			addItemMenu(applicationContext.getMessage("paramDecision.menus.title", null, getLocale()), CtrCandTypeDecisionView.NAME, FontAwesome.GAVEL, subMenuParamCC, itemMenuCtrCand);
			viewAccordionCtrCand.put(CtrCandMailTypeDecView.NAME, (String) itemMenuCtrCand.getData());
			viewAccordionCtrCand.put(CtrCandTypeDecisionView.NAME, (String) itemMenuCtrCand.getData());
			viewAccordionCtrCand.put(CtrCandMotivAvisView.NAME, (String) itemMenuCtrCand.getData());
		}
		/* Commissions */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_GEST_COMMISSION)) {
			addItemMenu(applicationContext.getMessage(CtrCandCommissionView.NAME + ".title", null, getLocale()), CtrCandCommissionView.NAME, FontAwesome.CALENDAR, null, itemMenuCtrCand);
			viewAccordionCtrCand.put(CtrCandCommissionView.NAME, (String) itemMenuCtrCand.getData());
		}
		/* Supporting documents */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_GEST_PJ)) {
			final LinkedList<SubMenu> subMenuPj = new LinkedList<>();
			subMenuPj.add(new SubMenu(CtrCandPieceJustifView.NAME, FontAwesome.FILE_TEXT_O));
			subMenuPj.add(new SubMenu(CtrCandPieceJustifCommunView.NAME, FontAwesome.FILES_O));
			addItemMenu(applicationContext.getMessage(CtrCandPieceJustifView.NAME + ".title", null, getLocale()), CtrCandPieceJustifView.NAME, FontAwesome.FILE_TEXT_O, subMenuPj, itemMenuCtrCand);
			viewAccordionCtrCand.put(CtrCandPieceJustifView.NAME, (String) itemMenuCtrCand.getData());
			viewAccordionCtrCand.put(CtrCandPieceJustifCommunView.NAME, (String) itemMenuCtrCand.getData());
		}
		/* Forms */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_GEST_FORMULAIRE)) {
			final LinkedList<SubMenu> subMenuForm = new LinkedList<>();
			subMenuForm.add(new SubMenu(CtrCandFormulaireView.NAME, FontAwesome.FILE_ZIP_O));
			subMenuForm.add(new SubMenu(CtrCandFormulaireCommunView.NAME, FontAwesome.FILES_O));
			addItemMenu(applicationContext.getMessage(CtrCandFormulaireView.NAME + ".title", null, getLocale()), CtrCandFormulaireView.NAME, FontAwesome.FILE_ZIP_O, subMenuForm, itemMenuCtrCand);
			viewAccordionCtrCand.put(CtrCandFormulaireView.NAME, (String) itemMenuCtrCand.getData());
			viewAccordionCtrCand.put(CtrCandFormulaireCommunView.NAME, (String) itemMenuCtrCand.getData());
		}
		/* Formations */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_GEST_FORMATION)) {
			addItemMenu(applicationContext.getMessage(CtrCandFormationView.NAME + ".title", null, getLocale()), CtrCandFormationView.NAME, FontAwesome.LEAF, null, itemMenuCtrCand);
			viewAccordionCtrCand.put(CtrCandFormationView.NAME, (String) itemMenuCtrCand.getData());
		}
		/* Candidatures */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_GEST_CANDIDATURE)) {
			final LinkedList<SubMenu> subMenuCandidatures = new LinkedList<>();
			subMenuCandidatures.add(new SubMenu(CtrCandCandidatureView.NAME, FontAwesome.BRIEFCASE));
			subMenuCandidatures.add(new SubMenu(CtrCandCandidatureCanceledView.NAME, FontAwesome.WARNING));
			subMenuCandidatures.add(new SubMenu(CtrCandCandidatureArchivedView.NAME, FontAwesome.FOLDER_OPEN));
			addItemMenu(applicationContext.getMessage(CtrCandCandidatureView.NAME + ".title", null, getLocale()),
					CtrCandCandidatureView.NAME,
					FontAwesome.BRIEFCASE,
					subMenuCandidatures,
					itemMenuCtrCand);
			viewAccordionCtrCand.put(CtrCandCandidatureView.NAME, (String) itemMenuCtrCand.getData());
			viewAccordionCtrCand.put(CtrCandCandidatureCanceledView.NAME, (String) itemMenuCtrCand.getData());
			viewAccordionCtrCand.put(CtrCandCandidatureArchivedView.NAME, (String) itemMenuCtrCand.getData());
			/* The user can access the candidature screens --> register the SVA alert CSS */
			initAlertSva();
		}
	} else {
		/* No centre in session yet: just show the default "choose a centre" button */
		final OneClickButton ctrCandBtn = constructCtrCandChangeBtn(applicationContext.getMessage("ctrCand.window.change.default", null, getLocale()));
		itemMenuCtrCand.addButton(ctrCandBtn);
	}
}
/**
 * Builds the button used to search for / switch the candidature centre.
 * @param libelle
 *            the button caption
 * @return the search button
 */
private OneClickButton constructCtrCandChangeBtn(final String libelle) {
	final OneClickButton changeButton = new OneClickButton(libelle);
	changeButton.setPrimaryStyleName(ValoTheme.MENU_ITEM);
	changeButton.addClickListener(clickEvent -> {
		final SearchCtrCandWindow searchWindow = new SearchCtrCandWindow();
		searchWindow.addCentreCandidatureListener(centre -> {
			/* Store the chosen centre in session, rebuild the menu and go back home */
			userController.setCentreCandidature(centre);
			buildMenuCtrCand();
			navigateToView(AccueilView.NAME);
			idCtrCandEnCours = centre.getIdCtrCand();
		});
		getCurrent().addWindow(searchWindow);
	});
	return changeButton;
}
/**
 * Checks that the candidature centre currently being edited is the same as the one in session.
 * @param ctrCand
 *            the centre to check (may be null)
 * @return true when consistent, false otherwise (the centre menu is rebuilt)
 */
public Boolean checkConcordanceCentreCandidature(final CentreCandidature ctrCand) {
	/* Simplified: the original repeated a redundant "ctrCand != null" after "ctrCand == null ||" */
	if (idCtrCandEnCours != null && (ctrCand == null || !idCtrCandEnCours.equals(ctrCand.getIdCtrCand()))) {
		Notification.show(applicationContext.getMessage("ctrCand.change.error", null, getLocale()));
		buildMenuCtrCand();
		return false;
	}
	return true;
}
/** Builds the commission menu using the current authentication. */
public void buildMenuCommission() {
	buildMenuCommission(userController.getCurrentAuthentication());
}
/**
 * Builds the commission menu for the commission stored in session,
 * filtering each entry by the user's functionality rights.
 * @param auth the current authentication
 */
private void buildMenuCommission(final Authentication auth) {
	/* Drop the view/button registrations added for a previous commission */
	itemMenuCommission.removeAllButtons();
	viewAccordionCommission.forEach((key, value) -> {
		viewButtons.remove(key);
		viewAccordion.remove(key);
	});
	viewAccordionCommission.clear();
	final SecurityCommission commission = userController.getCommission(auth);
	if (commission != null) {
		final Boolean isScolCentrale = userController.isScolCentrale(auth);
		final List<DroitProfilFonc> listFonctionnalite = commission.getListFonctionnalite();
		idCommissionEnCours = commission.getIdComm();
		/* Button showing the current commission; clicking it opens the commission search window */
		final OneClickButton commissionBtn = constructCommissionChangeBtn(commission.getLibComm());
		commissionBtn.setDescription(applicationContext.getMessage("commission.window.change", new Object[] { commission.getLibComm() }, getLocale()));
		itemMenuCommission.addButton(commissionBtn);
		/* Parameters */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_PARAM)) {
			addItemMenu(applicationContext.getMessage(CommissionParametreView.NAME + ".title", null, getLocale()), CommissionParametreView.NAME, FontAwesome.COG, null, itemMenuCommission);
			viewAccordionCommission.put(CommissionParametreView.NAME, (String) itemMenuCommission.getData());
		}
		/* Candidatures — consistency fix: reuse the local listFonctionnalite instead of re-calling commission.getListFonctionnalite() */
		if (hasAccessToFonctionnalite(isScolCentrale, listFonctionnalite, NomenclatureUtils.FONCTIONNALITE_GEST_CANDIDATURE)) {
			addItemMenu(applicationContext.getMessage(CommissionCandidatureView.NAME + ".title", null, getLocale()),
					CommissionCandidatureView.NAME,
					FontAwesome.BRIEFCASE,
					null,
					itemMenuCommission);
			viewAccordionCommission.put(CommissionCandidatureView.NAME, (String) itemMenuCommission.getData());
			/* The user can access the candidature screens --> register the SVA alerts */
			initAlertSva();
		}
	} else {
		/* No commission in session yet: just show the default "choose a commission" button */
		final OneClickButton commissionBtn = constructCommissionChangeBtn(applicationContext.getMessage("commission.window.change.default", null, getLocale()));
		itemMenuCommission.addButton(commissionBtn);
	}
}
/**
 * Builds the button used to search for / switch the commission.
 * @param libelle
 *            the button caption
 * @return the search button
 */
private OneClickButton constructCommissionChangeBtn(final String libelle) {
	final OneClickButton changeButton = new OneClickButton(libelle);
	changeButton.setPrimaryStyleName(ValoTheme.MENU_ITEM);
	changeButton.addClickListener(clickEvent -> {
		final SearchCommissionWindow searchWindow = new SearchCommissionWindow(null);
		searchWindow.addCommissionListener(comm -> {
			/* Store the chosen commission in session, rebuild the menu and go back home */
			userController.setCommission(comm);
			buildMenuCommission();
			navigateToView(AccueilView.NAME);
			idCommissionEnCours = comm.getIdComm();
		});
		getCurrent().addWindow(searchWindow);
	});
	return changeButton;
}
/**
 * Checks that the commission currently being edited matches the commission in session.
 * @param commission
 *            the commission to check (may be null)
 * @return true when consistent, false otherwise (the commission menu is rebuilt)
 */
public Boolean checkConcordanceCommission(final Commission commission) {
	/* Simplified: the original repeated a redundant "commission != null" after "commission == null ||" */
	if (idCommissionEnCours != null && (commission == null || !idCommissionEnCours.equals(commission.getIdComm()))) {
		Notification.show(applicationContext.getMessage("commission.change.error", null, getLocale()));
		buildMenuCommission();
		return false;
	}
	return true;
}
/**
 * Checks whether the user may access a functionality.
 * (Javadoc fixed: the original documented a non-existent "isAdmin" parameter.)
 * @param isScolCentrale
 *            whether the user belongs to the central scolarité (grants full access)
 * @param listFonctionnalite
 *            the manager's functionality rights (may be null)
 * @param codFonc
 *            the functionality code to test
 * @return true when access is granted, false otherwise
 */
private Boolean hasAccessToFonctionnalite(final Boolean isScolCentrale, final List<DroitProfilFonc> listFonctionnalite, final String codFonc) {
	if (isScolCentrale) {
		return true;
	}
	/* anyMatch replaces the heavier filter(...).findFirst().isPresent() chain */
	return listFonctionnalite != null
			&& listFonctionnalite.stream().anyMatch(e -> e.getDroitFonctionnalite().getCodFonc().equals(codFonc));
}
/**
 * Adds an item button to the menu, with or without a sub-menu.
 * When sub-menus are given, the button navigates to the FIRST sub-menu view and
 * {@code viewName} is ignored; each sub-menu view is mapped to this same button.
 * (Javadoc fixed: the original documented "mapSubMenu" instead of "subMenus".)
 * @param caption
 *            the button caption
 * @param viewName
 *            the attached view name (used only when subMenus is null)
 * @param icon
 *            the menu icon
 * @param subMenus
 *            an optional list of sub-menus (may be null)
 * @param itemMenu
 *            the accordion item the button belongs to; null means the top-level menu layout
 */
private void addItemMenu(final String caption, final String viewName, final com.vaadin.server.Resource icon, final LinkedList<SubMenu> subMenus, final AccordionItemMenu itemMenu) {
	final OneClickButton itemBtn = new OneClickButton(caption, icon);
	final Menu menu = new Menu(viewName, subMenus, itemBtn);
	itemBtn.setPrimaryStyleName(ValoTheme.MENU_ITEM);
	/* No sub-menu: the button navigates straight to its view */
	if (subMenus == null) {
		itemBtn.addClickListener(e -> {
			navigateToView(viewName);
		});
		viewButtons.put(viewName, menu);
		if (itemMenu != null) {
			viewAccordion.put(viewName, (String) itemMenu.getData());
		}
	}
	/* Sub-menus present: associate the menu button with each sub-menu view */
	else {
		subMenus.forEach(e -> {
			viewButtons.put(e.getVue(), menu);
			if (itemMenu != null) {
				viewAccordion.put(e.getVue(), (String) itemMenu.getData());
			}
		});
		itemBtn.addClickListener(e -> {
			navigateToView(subMenus.getFirst().getVue());
		});
	}
	/* Attach the button either to the top-level layout or to the given accordion item */
	if (itemMenu == null) {
		menuButtonLayout.addComponent(itemBtn);
	} else {
		itemMenu.addButton(itemBtn);
	}
}
/**
 * Builds (or reuses) the sub-menu bar for the given menu entry.
 * @param menu
 *            the menu entry
 * @param vue
 *            the view to select in the sub-menu bar
 */
private void contructSubMenu(final Menu menu, final String vue) {
	if (menu.hasSubMenu()) {
		/* Only rebuild when the menu was not already built by the previous action */
		if (lastButtonView == null || !lastButtonView.equals(menu.getBtn())) {
			subBarMenu.constructMenuBar(menu, navigator, vue);
		} else {
			// Same menu as last time: just select the sheet for the target view
			subBarMenu.selectSubMenuSheet(menu, vue, navigator, true);
		}
		subBarMenu.setVisible(true);
	} else {
		subBarMenu.setVisible(false);
	}
	/* Remember the last clicked button so the bar is not rebuilt on every navigation */
	lastButtonView = menu.getBtn();
}
/** Configures the reconnect dialog shown when the server connection is lost. */
private void configReconnectDialog() {
	/* Cap the retry count, then make the dialog modal and localize its texts */
	getReconnectDialogConfiguration().setReconnectAttempts(TENTATIVES_RECO);
	getReconnectDialogConfiguration().setDialogModal(true);
	configReconnectDialogMessages();
}
/** Updates the reconnect dialog texts (called again after a locale change). */
public void configReconnectDialogMessages() {
	final String dialogText = applicationContext.getMessage("vaadin.reconnectDialog.text", null, getLocale());
	final String gaveUpText = applicationContext.getMessage("vaadin.reconnectDialog.textGaveUp", null, getLocale());
	getReconnectDialogConfiguration().setDialogText(dialogText);
	getReconnectDialogConfiguration().setDialogTextGaveUp(gaveUpText);
}
/** Initializes the view navigator: error view, access control, menu focus handling. */
private void initNavigator() {
	navigator.addProvider(viewProvider);
	/* Any unresolvable view name falls back to the error view */
	navigator.setErrorProvider(new ViewProvider() {
		@Override
		public String getViewName(final String viewAndParameters) {
			return ErreurView.NAME;
		}
		@Override
		public View getView(final String viewName) {
			return viewProvider.getView(ErreurView.NAME);
		}
	});
	navigator.addViewChangeListener(new ViewChangeListener() {
		@Override
		public boolean beforeViewChange(final ViewChangeEvent event) {
			/* Block navigation to views the user has no menu entry for (except the always-allowed ones) */
			if (!event.getViewName().equals(AccueilView.NAME) && !event.getViewName().equals(ErreurView.NAME)
					&& !event.getViewName().equals(CandidatCompteMinimaView.NAME)
					&& !event.getViewName().equals(MaintenanceView.NAME)
					&& !viewButtons.containsKey(event.getViewName())) {
				navigateToView(ErreurView.NAME);
				return false;
			}
			/* Clear the "selected" highlight on every menu button before switching */
			viewButtons.values().forEach(menu -> menu.getBtn().removeStyleName(SELECTED_ITEM));
			/* Maintenance mode redirects everything to the maintenance view */
			if (uiController.redirectToMaintenanceView(event.getViewName())) {
				navigateToView(MaintenanceView.NAME);
				return false;
			}
			return true;
		}
		@Override
		public void afterViewChange(final ViewChangeEvent event) {
			/* Highlight the new view's menu entry, rebuild its sub-menu bar and accordion state */
			focusCurrentMenu(event.getViewName());
			final Menu menuItem = viewButtons.get(event.getViewName());
			if (menuItem != null && menuItem.getBtn() instanceof OneClickButton) {
				contructSubMenu(menuItem, event.getViewName());
			}
			focusCurrentAccordion(event.getViewName());
			currentViewName = event.getViewName();
			/* Collapse the (mobile) menu after navigating */
			menu.removeStyleName(StyleConstants.VALO_MENU_VISIBLE);
		}
	});
	/* Resolve the initial view: no URI fragment means go home */
	final String fragment = Page.getCurrent().getUriFragment();
	if (fragment == null || fragment.isEmpty()) {
		navigateToView(AccueilView.NAME);
	}
}
/** Rebuilds the sub-menu bar for the current view (used after a locale change). */
private void reloadSubMenuBar() {
    if (currentViewName != null) {
        final Menu currentMenu = viewButtons.get(currentViewName);
        if (currentMenu != null) {
            contructSubMenu(currentMenu, currentViewName);
        }
    }
}
/**
 * Highlights and focuses the menu button bound to the given view.
 * @param viewName
 *            name of the view whose menu entry should gain focus (may be null)
 */
private void focusCurrentMenu(final String viewName) {
    if (viewName == null) {
        return;
    }
    final Menu entry = viewButtons.get(viewName);
    if (entry == null || !(entry.getBtn() instanceof OneClickButton)) {
        return;
    }
    entry.getBtn().addStyleName(SELECTED_ITEM);
    entry.getBtn().focus();
}
/**
 * Selects the accordion item associated with the given view.
 * @param viewName
 *            name of the view whose accordion entry should be shown
 */
private void focusCurrentAccordion(final String viewName) {
    final String idAccordion = viewAccordion.get(viewName);
    // Only switch when the view belongs to a different accordion item
    if (idAccordion != null && !idAccordion.equals(accordionMenu.getItemId())) {
        accordionMenu.changeItem(idAccordion);
    }
}
/** Injects the SVA alert CSS rules into the page (at most once per UI). */
private void initAlertSva() {
    if (isSvaAlertDisplay) {
        return;
    }
    /* Add every CSS rule colourising the grid rows for SVA */
    alertSvaController.getListAlertSvaCss()
            .forEach(cssRule -> Page.getCurrent().getStyles().add(cssRule));
    isSvaAlertDisplay = true;
}
/**
 * Initialises the activity tracker: Piwik when both the tracker URL and the
 * site id are configured and non-empty, otherwise a simple log-based tracker.
 */
private void initAnalyticsTracker() {
    // `instanceof` already evaluates to false for null, so the previous
    // explicit `!= null` checks were dead code and have been removed.
    if (piwikAnalyticsTrackerUrl instanceof String
            && !piwikAnalyticsTrackerUrl.equals("")
            && piwikAnalyticsSiteId instanceof String
            && !piwikAnalyticsSiteId.equals("")) {
        analyticsTracker = new PiwikAnalyticsTracker(this, piwikAnalyticsTrackerUrl, piwikAnalyticsSiteId);
    } else {
        analyticsTracker = new LogAnalyticsTracker();
    }
    // Track every navigation performed through the navigator
    analyticsTracker.trackNavigator(navigator);
}
/** @see com.vaadin.ui.UI#detach() */
@Override
public void detach() {
    // Release every candidate lock held by this UI instance
    lockCandidatController.removeAllLockUI(uiId);
    /* Unregister from notification delivery */
    uiController.unregisterUI(this);
    super.detach();
}
}
|
pradeep2304/RAutomation | spec/adapter/ms_uia/keystroke_converter_spec.rb | $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'rautomation'
require 'rspec'
# Specs for KeystrokeConverter: converting a string (including "{special}"
# tokens) into a sequence of Windows virtual-key codes. Only runs when the
# MsUia adapter is selected.
describe "KeystrokeConverter", :if => SpecHelper.adapter == :ms_uia do
  it "converts plain ASCII" do
    codes = RAutomation::Adapter::MsUia::KeystrokeConverter.convert("abc")
    converted_keys = convert_keys "abc"
    codes.should == converted_keys
  end

  # NOTE(review): despite the example name, the assertion checks that a left
  # SHIFT key code is inserted before the upper-case character — confirm the
  # intended behaviour ("caps lock" vs shift) against the converter.
  it "uses caps lock for entering downcase keys" do
    codes = RAutomation::Adapter::MsUia::KeystrokeConverter.convert("aBc")
    converted_keys = convert_keys "abc"
    converted_keys = converted_keys.insert(1, RAutomation::Adapter::MsUia::Constants::VK_LSHIFT)
    codes.should == converted_keys
  end

  # Known "{...}" tokens map to their VK_* constants; an unsupported token
  # ("{unsupported}") falls back to converting its characters one by one.
  it "converts special keys" do
    codes = RAutomation::Adapter::MsUia::KeystrokeConverter.convert("{tab}a{backspace}b{enter}c {left}d{right}ee{down}f{up}g{unsupported}{home}{end}{delete}")
    expected_codes = [
        RAutomation::Adapter::MsUia::Constants::VK_TAB,
        convert_keys("a"),
        RAutomation::Adapter::MsUia::Constants::VK_BACK,
        convert_keys("b"),
        RAutomation::Adapter::MsUia::Constants::VK_RETURN,
        convert_keys("c"),
        RAutomation::Adapter::MsUia::Constants::VK_SPACE,
        RAutomation::Adapter::MsUia::Constants::VK_LEFT,
        convert_keys("d"),
        RAutomation::Adapter::MsUia::Constants::VK_RIGHT,
        convert_keys("ee"),
        RAutomation::Adapter::MsUia::Constants::VK_DOWN,
        convert_keys("f"),
        RAutomation::Adapter::MsUia::Constants::VK_UP,
        convert_keys("g"),
        convert_keys("unsupported"),
        RAutomation::Adapter::MsUia::Constants::VK_HOME,
        RAutomation::Adapter::MsUia::Constants::VK_END,
        RAutomation::Adapter::MsUia::Constants::VK_DELETE
    ].flatten
    codes.should == expected_codes
  end

  # Maps each character to its virtual-key code: the ASCII byte value of the
  # upper-cased character.
  def convert_keys keys
    keys.split("").map { |k| k.upcase.unpack("c")[0] }
  end
end
|
jdegges/DNArm | src/fgmtest/fgm_interface.omp.c |
#include "fgm.h"

/* Standard headers used directly by this file (printf, malloc/free, uint32_t) */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#include "omp.h"
/*
 * Runs the fine grain matcher over a batch of coarse grain match results and
 * prints every mutation (SNP / INS / DEL) found for each read.
 *
 * list / listLen : reference index shared by all reads
 * cgmData        : batch of coarse grain match candidates
 * cgmDLen        : number of entries in cgmData
 *
 * Returns 0 on success, -1 on allocation failure.
 */
int fgmLaunch(uint32_t * list, uint32_t listLen, struct cgmResult * cgmData, int cgmDLen)
{
    int i, x;
    mData ** mutation; /* one mutation record per batch entry, filled by fgm() */

    mutation = malloc(sizeof(mData*) * cgmDLen);
    if (mutation == NULL)   /* previously unchecked: a failed malloc crashed below */
        return -1;

    omp_set_num_threads(8);

    /* Each batch entry is matched independently, so the loop parallelises
     * cleanly; x is implicitly privatised by the parallel-for construct. */
    #pragma omp parallel for
    for (x = 0; x < cgmDLen; x++)
    {
        /* Obtain a fine grain match of the given read sequence. */
        fgm(&(mutation[x]), list, listLen, 48, cgmData[x]->matches, cgmData[x]->length, cgmData[x]->read);
    }

    /* Sequential printout. We are outside the parallel region here, so the
     * "single" pragma executes with a team of one (effectively a no-op). */
    #pragma omp single
    {
        for (x = 0; x < cgmDLen; x++)
        {
            if (mutation[x]->len != -1) /* len == -1 means no match was produced */
            {
                printf("%d\n", mutation[x]->len);
                for (i = 0; i < mutation[x]->len; i++)
                {
                    switch (mutation[x]->ins[i])
                    {
                    case DEL:
                        /* BUG FIX: the format string has a single %u but two
                         * arguments were passed, so ins[i] (the op code) was
                         * printed as the location and locs[i] was ignored. */
                        printf("Type: DEL Location: %u\n", mutation[x]->locs[i]);
                        break;
                    case SNP:
                        /* BUG FIX: ins[i] was consumed by %c and mods[i] by %u,
                         * printing the op code as the mutation character and the
                         * character as the location. */
                        printf("Type: SNP Mutation: %c Location: %u\n", mutation[x]->mods[i], mutation[x]->locs[i]);
                        break;
                    case INS:
                        /* BUG FIX: same argument shift as the SNP case above. */
                        printf("Type: INS Mutation: %c Location: %u\n", mutation[x]->mods[i], mutation[x]->locs[i]);
                        break;
                    default:
                        break; /* unknown op code: silently skip */
                    }
                }
                /* NOTE(review): the per-entry buffers below were deliberately left
                 * commented out by the original author; as written, one mData per
                 * entry leaks on every call. Confirm ownership against fgm()'s
                 * contract before re-enabling.
                 * free(mutation[x]->mods);
                 * free(mutation[x]->locs);
                 * free(mutation[x]->ins);
                 * free(mutation[x]);
                 */
            }
            /* else: no match generated — fail silently by design. */
        }
        free(mutation); /* free the pointer array itself */
    }
    return 0;
}
|
ducky-hong/gigapaxos | src/edu/umass/cs/nio/nioutils/NIOBSTester.java | <filename>src/edu/umass/cs/nio/nioutils/NIOBSTester.java
package edu.umass.cs.nio.nioutils;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import edu.umass.cs.gigapaxos.paxosutil.RateLimiter;
import edu.umass.cs.nio.NIOTransport;
import edu.umass.cs.nio.interfaces.DataProcessingWorker;
import edu.umass.cs.utils.Util;
/**
 * @author arun
 *
 *         A simple local tester for SSL. Run this class in two separate
 *         terminals, one with argument 100, and the other with argument 101.
 *         You also need to configure SSL parameters like keyStore, trustStore
 *         and their passwords for the JVM.
 */
public class NIOBSTester {
    // The two NIO endpoints under test: niot1 sends, niot2 echoes.
    static NIOTransport<Integer> niot1 = null, niot2 = null;
    // Start-of-run timestamp used by all throughput computations below.
    static long t = System.currentTimeMillis();
    // When true, the echo side (DPWEcho) sends replies back to the sender.
    static boolean twoWay = true;

    /**
     * @param args
     *            optional single integer offset; node ids become 101+offset
     *            and 102+offset.
     */
    public static void main(String[] args) {
        int offset = (args.length > 0 ? Integer.valueOf(args[0]) : 0);
        int id1 = 101 + offset, id2 = 102 + offset;
        SampleNodeConfig<Integer> snc = new SampleNodeConfig<Integer>();
        snc.addLocal(id1);
        snc.addLocal(id2);
        final InetSocketAddress isa1 = new InetSocketAddress(snc.getNodeAddress(id1),
                snc.getNodePort(id1));
        InetSocketAddress isa2 = new InetSocketAddress(snc.getNodeAddress(id2),
                snc.getNodePort(id2));

        final int numTestMessages = 1000000;
        RateLimiter r = new RateLimiter(400000);
        // Build a payload of exactly `size` bytes by doubling a seed string.
        int size = 1000;
        String gibberish = "|47343289u2309exi4322|";
        while (gibberish.length() < size)
            gibberish += gibberish;
        gibberish = gibberish.substring(0, size);
        byte[] sendBytes = gibberish.getBytes();
        final int msgSize = sendBytes.length;
        final byte[] replyBytes = new byte[sendBytes.length / 1];
        final int replySize = replyBytes.length;
        final int replyRatio = 1;
        final int printFreq = numTestMessages / 10 / replyRatio;
        final int batchSize = 1;

        // Echo-side worker: counts received bytes/messages and (when twoWay)
        // sends every batchSize*replyRatio-th chunk back to the sender.
        class DPWEcho implements DataProcessingWorker {
            int count = 0;
            int msgCount = 0;

            @Override
            public void processData(SocketChannel socket, ByteBuffer incoming) {
                byte[] buf = new byte[incoming.remaining()];
                incoming.get(buf);
                boolean sendReply = false;
                // Counters are shared across NIO worker threads, hence the lock.
                synchronized (this) {
                    count += buf.length;
                    msgCount++;
                    sendReply = ((count / msgSize) % (batchSize * replyRatio) == 0);
                }
                if (sendReply) {
                    try {
                        if (twoWay)
                            // Busy-wait until the transport accepts the reply.
                            while(niot2.send(isa1, buf, batchSize) <= 0);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    if (count == numTestMessages * msgSize)
                        System.out
                                .println("Request receipt rate after receiving *ALL* "
                                        + msgCount
                                        + " requests and "
                                        + count
                                        + " bytes = "
                                        + Util.df(count
                                                / msgSize
                                                * 1000.0
                                                / (System.currentTimeMillis() - t))
                                        + "/sec ");
                }
            }

            @Override
            public void demultiplexMessage(Object message) {
                // This tester only uses the byte-stream path.
                throw new RuntimeException("Should not get here");
            }
        }

        // Sender-side worker: counts reply bytes/packets, prints progress every
        // printFreq packets, and stops both transports once all replies arrive.
        class DPWSender implements DataProcessingWorker {
            int count = 0;
            int msgCount = 0;

            @Override
            public void processData(SocketChannel socket, ByteBuffer incoming) {
                byte[] message = new byte[incoming.remaining()];
                incoming.get(message);
                synchronized (this) {
                    count += message.length;
                    msgCount++;
                }
                if (count == (numTestMessages * replySize) / replyRatio) {
                    // if (msgCount == numTestMessages) {
                    System.out.println("Response rate after ALL "
                            // + msgCount
                            + count
                            + " bytes and "
                            + msgCount
                            + " packets = "
                            + Util.df(count / replySize * 1000.0
                                    / (System.currentTimeMillis() - t))
                            + "/sec " + "; total_time = "
                            + (System.currentTimeMillis() - t) / 1000
                            + " secs; exiting");
                    try {
                        // Grace period so in-flight sends complete before stop().
                        Thread.sleep(500);
                    } catch (InterruptedException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    niot1.stop();
                    niot2.stop();
                } else if (msgCount % printFreq == 0) {
                    System.out.println("Response rate after "
                            + count
                            + " bytes and "
                            + msgCount
                            + " packets = "
                            + Util.df(count / replySize * 1000.0
                                    / (System.currentTimeMillis() - t))
                            + "/sec ");
                }
            }

            @Override
            public void demultiplexMessage(Object message) {
                throw new RuntimeException("Should not get here");
            }
        }

        try {
            niot1 = new NIOTransport<Integer>(id1, snc, new DPWSender());
            niot2 = new NIOTransport<Integer>(id2, snc, new DPWEcho());
            t = System.currentTimeMillis();
            int numBytesSent = 0;
            // Send the full test load, spinning (with yield) whenever the
            // transport's outbound queue is full; rate-limited by `r`.
            for (int i = 0; i < numTestMessages / batchSize; i++) {
                int curSent = 0;
                while((curSent = niot1.send(isa2, sendBytes, batchSize)) <= 0) Thread.yield();
                numBytesSent += curSent;
                r.record();
            }
            System.out.println("Sent "
                    + numTestMessages
                    + " and "
                    + numBytesSent
                    + " bytes at "
                    + Util.df(numTestMessages * 1000.0
                            / (System.currentTimeMillis() - t)) + "/sec");
            {
                // Final "poke" message after a long pause, presumably to flush
                // any pending data — NOTE(review): the hard-coded "+ 8" byte
                // adjustment is unexplained; confirm against NIOTransport.send.
                Thread.sleep(10000);
                numBytesSent += niot1.send(isa2,
                        ("*******poke*******" + gibberish).getBytes(),
                        batchSize) + 8;
            }
        } catch (Exception e) {
            e.printStackTrace();
            niot1.stop();
            niot2.stop();
        }
    }
}
|
rennat/django_delta_logger | example_django_project/example_django_project/serialization.py | from django.conf import settings
from django.utils.module_loading import import_string
# Resolve the JSON encoder class configured via the dotted path in
# settings.JSON_ENCODER and expose it under a stable module-level name.
JsonEncoder = import_string(settings.JSON_ENCODER)
|
cgghali/TAF | pytests/epengine/collection_crud_negative.py | <filename>pytests/epengine/collection_crud_negative.py
import json
from random import choice
from BucketLib.bucket import Bucket
from Cb_constants import DocLoading
from bucket_collections.collections_base import CollectionBase
from cb_tools.cbstats import Cbstats
from collections_helper.collections_spec_constants import MetaCrudParams
from couchbase_helper.durability_helper import DurabilityHelper
from error_simulation.cb_error import CouchbaseError
from remote.remote_util import RemoteMachineShellConnection
from sdk_exceptions import SDKException
class CollectionDurabilityTests(CollectionBase):
    def setUp(self):
        """Extends CollectionBase.setUp with durability bookkeeping.

        Initialises the cbstat vbucket-details verification counters and
        seeds ``ops_create`` / ``sync_write_committed_count`` with the items
        already loaded by the parent setUp.
        """
        super(CollectionDurabilityTests, self).setUp()
        self.bucket = self.bucket_util.buckets[0]
        self.with_non_sync_writes = self.input.param("with_non_sync_writes",
                                                     False)
        # With more than one replica, two nodes must be affected before
        # durability becomes impossible
        self.num_nodes_affected = 1
        if self.num_replicas > 1:
            self.num_nodes_affected = 2

        # Expected values used by verify_vbucket_details_stats()
        self.verification_dict = dict()
        self.verification_dict["ops_create"] = 0
        self.verification_dict["ops_update"] = 0
        self.verification_dict["ops_delete"] = 0
        self.verification_dict["rollback_item_count"] = 0
        self.verification_dict["sync_write_committed_count"] = 0

        # Populate initial cb_stat values as per num_items
        for _, scope in self.bucket.scopes.items():
            for _, collection in scope.collections.items():
                self.verification_dict["ops_create"] += collection.num_items
        if self.durability_helper.is_sync_write_enabled(
                self.bucket_durability_level, self.durability_level):
            self.verification_dict["sync_write_committed_count"] \
                += self.num_items
    def tearDown(self):
        """Plain pass-through to CollectionBase.tearDown (cluster cleanup)."""
        super(CollectionDurabilityTests, self).tearDown()
def __get_random_durability_level(self):
supported_d_levels = [d_level for d_level in self.supported_d_levels]
supported_d_levels.remove(Bucket.DurabilityLevel.NONE)
return choice(supported_d_levels)
def __get_d_level_and_error_to_simulate(self):
self.simulate_error = CouchbaseError.STOP_PERSISTENCE
self.durability_level = self.__get_random_durability_level()
if self.durability_level == Bucket.DurabilityLevel.MAJORITY:
self.simulate_error = CouchbaseError.STOP_MEMCACHED
self.log.info("Testing with durability_level=%s, simulate_error=%s"
% (self.durability_level, self.simulate_error))
    def test_crud_failures(self):
        """
        Test to configure the cluster in such a way durability will always fail
        1. Try creating the docs with durability set
        2. Verify create failed with durability_not_possible exception
        3. Create docs using async_writes
        4. Perform update and delete ops with durability
        5. Make sure these ops also fail with durability_not_possible exception
        """
        vb_info = dict()
        shell_conn = dict()
        cbstat_obj = dict()
        vb_info["create_stat"] = dict()
        vb_info["failure_stat"] = dict()
        nodes_in_cluster = self.cluster_util.get_kv_nodes()
        sub_doc_test = self.input.param("sub_doc_test", False)
        if sub_doc_test:
            # Sub-doc variant needs full docs preloaded before mutating paths
            self.load_data_for_sub_doc_ops(self.verification_dict)
            failed = self.durability_helper.verify_vbucket_details_stats(
                self.bucket, self.cluster_util.get_kv_nodes(),
                vbuckets=self.cluster_util.vbuckets,
                expected_val=self.verification_dict)
            if failed:
                self.fail("Cbstat vbucket-details verification failed")

        # Override durability_level to test
        self.durability_level = self.__get_random_durability_level()
        self.log.info("Testing with durability_level=%s"
                      % self.durability_level)

        # Load spec expecting DurabilityImpossibleException on every mutation
        doc_load_spec = dict()
        doc_load_spec["doc_crud"] = dict()
        doc_load_spec["subdoc_crud"] = dict()
        doc_load_spec[MetaCrudParams.SKIP_READ_ON_ERROR] = True
        doc_load_spec[MetaCrudParams.IGNORE_EXCEPTIONS] = [
            SDKException.DurabilityImpossibleException]
        doc_load_spec[MetaCrudParams.SKIP_READ_ON_ERROR] = True
        doc_load_spec[MetaCrudParams.SUPPRESS_ERROR_TABLE] = True
        doc_load_spec[MetaCrudParams.DURABILITY_LEVEL] = \
            self.durability_level
        doc_load_spec["doc_crud"][MetaCrudParams.DocCrud.COMMON_DOC_KEY] = \
            "test_collections"

        if not sub_doc_test:
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 10
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 10
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 10
        else:
            doc_load_spec["subdoc_crud"][
                MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 10
            doc_load_spec["subdoc_crud"][
                MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 10

        num_items_before_d_load = \
            self.bucket_util.get_expected_total_num_items(self.bucket)

        for node in nodes_in_cluster:
            # NOTE(review): every shell connects to cluster.master instead of
            # the iterated `node` — looks unintended; confirm.
            shell_conn[node.ip] = \
                RemoteMachineShellConnection(self.cluster.master)
            cbstat_obj[node.ip] = Cbstats(shell_conn[node.ip])

            # Fetch vbucket seq_no stats from vb_seqno command for verification
            vb_info["create_stat"].update(cbstat_obj[node.ip]
                                          .vbucket_seqno(self.bucket.name))

        # MB-34064 - Try same CREATE twice to validate doc cleanup in server
        for _ in range(2):
            collection_crud_task = \
                self.bucket_util.run_scenario_from_spec(
                    self.task,
                    self.cluster,
                    self.bucket_util.buckets,
                    doc_load_spec)
            if collection_crud_task.result is False:
                self.log_failure("Collection MutationTask failed")

        # Fetch vbucket seq_no status from cbstats after CREATE task
        for node in nodes_in_cluster:
            vb_info["failure_stat"].update(
                cbstat_obj[node.ip].vbucket_seqno(self.bucket.name))

        # Verify doc count has not changed due to expected exceptions
        curr_num_items = self.bucket_util.get_bucket_current_item_count(
            self.cluster, self.bucket)
        if curr_num_items != num_items_before_d_load:
            self.log_failure("Few mutation went in. "
                             "Docs expected: %s, actual: %s"
                             % (num_items_before_d_load, curr_num_items))
        self.bucket_util.validate_docs_per_collections_all_buckets()
        if vb_info["create_stat"] != vb_info["failure_stat"]:
            self.log_failure(
                "Failure stats mismatch. {0} != {1}"
                .format(vb_info["create_stat"], vb_info["failure_stat"]))

        # Rewind doc_indexes to starting point to re-use the same index
        for bucket, s_dict in collection_crud_task.loader_spec.items():
            for s_name, c_dict in s_dict["scopes"].items():
                scope = bucket.scopes[s_name]
                for c_name, _ in c_dict["collections"].items():
                    c_crud_data = collection_crud_task.loader_spec[
                        bucket]["scopes"][
                        s_name]["collections"][c_name]
                    for op_type in c_crud_data.keys():
                        self.bucket_util.rewind_doc_index(
                            scope.collections[c_name],
                            op_type,
                            c_crud_data[op_type]["doc_gen"])

        # Cb stat validation before trying successful mutation
        failed = self.durability_helper.verify_vbucket_details_stats(
            self.bucket, self.cluster_util.get_kv_nodes(),
            vbuckets=self.cluster_util.vbuckets,
            expected_val=self.verification_dict)
        if failed:
            self.log_failure("Cbstat vbucket-details verification failed ")

        if not sub_doc_test and \
                vb_info["create_stat"] != vb_info["failure_stat"]:
            self.log_failure("Failover stats failed to update. %s != %s"
                             % (vb_info["failure_stat"],
                                vb_info["create_stat"]))
        self.validate_test_failure()

        # Perform async CRUDs on the documents
        doc_load_spec[MetaCrudParams.DURABILITY_LEVEL] = ""
        doc_load_spec[MetaCrudParams.IGNORE_EXCEPTIONS] = []
        doc_load_spec[MetaCrudParams.RETRY_EXCEPTIONS] = []

        collection_crud_task = \
            self.bucket_util.run_scenario_from_spec(
                self.task,
                self.cluster,
                self.bucket_util.buckets,
                doc_load_spec)
        if collection_crud_task.result is False:
            self.log_failure("CRUDs with async_writes failed")

        # Wait for ep_queue to drain
        self.bucket_util._wait_for_stats_all_buckets()

        # Reset failure_stat dictionary for reuse
        vb_info["failure_stat"] = dict()

        # Fetch vbucket seq_no status from vb_seqno after UPDATE/DELETE task
        for node in nodes_in_cluster:
            vb_info["failure_stat"].update(cbstat_obj[node.ip]
                                           .vbucket_seqno(self.bucket.name))

        # Async writes must have moved seq numbers this time
        if not sub_doc_test and \
                vb_info["create_stat"] == vb_info["failure_stat"]:
            self.log_failure("Failover stats failed to update. %s == %s"
                             % (vb_info["failure_stat"],
                                vb_info["create_stat"]))

        # Close all ssh sessions
        for node in nodes_in_cluster:
            shell_conn[node.ip].disconnect()

        # Update cbstat vb-details verification counters
        self.update_verification_dict_from_collection_task(
            self.verification_dict,
            collection_crud_task)

        failed = self.durability_helper.verify_vbucket_details_stats(
            self.bucket, self.cluster_util.get_kv_nodes(),
            vbuckets=self.cluster_util.vbuckets,
            expected_val=self.verification_dict)
        if failed:
            self.log_failure("Cbstat vbucket-details verification failed ")

        self.validate_test_failure()
        self.bucket_util.validate_docs_per_collections_all_buckets()
    def test_durability_abort(self):
        """
        Test to validate durability abort is triggered properly with proper
        rollback on active vbucket
        :return:
        """
        load_task = dict()

        # Override d_level, error_simulation type based on d_level
        self.__get_d_level_and_error_to_simulate()
        kv_nodes = self.cluster_util.get_kv_nodes()
        for server in kv_nodes:
            ssh_shell = RemoteMachineShellConnection(server)
            cbstats = Cbstats(ssh_shell)
            cb_err = CouchbaseError(self.log, ssh_shell)
            # Target the vbucket type whose unavailability forces an abort
            # for the chosen durability level
            target_vb_type = "replica"
            if self.durability_level \
                    == Bucket.DurabilityLevel.MAJORITY_AND_PERSIST_TO_ACTIVE:
                target_vb_type = "active"
            target_vbs = cbstats.vbucket_list(self.bucket.name, target_vb_type)

            # Small CRUD spec restricted to the affected vbuckets; expect
            # DurabilityAmbiguous and retry those keys later
            doc_load_spec = dict()
            doc_load_spec["doc_crud"] = dict()
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 2
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 2
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 2
            doc_load_spec["doc_crud"][MetaCrudParams.DocCrud.COMMON_DOC_KEY] \
                = "test_collections"
            doc_load_spec[MetaCrudParams.TARGET_VBUCKETS] = target_vbs
            doc_load_spec[MetaCrudParams.DURABILITY_LEVEL] \
                = self.durability_level
            doc_load_spec[MetaCrudParams.RETRY_EXCEPTIONS] = [
                SDKException.DurabilityAmbiguousException]
            doc_load_spec[MetaCrudParams.SDK_TIMEOUT] = 2
            doc_load_spec[MetaCrudParams.SKIP_READ_ON_ERROR] = True
            doc_load_spec[MetaCrudParams.SUPPRESS_ERROR_TABLE] = True

            # Run the load with the error active so SyncWrites get aborted
            cb_err.create(self.simulate_error,
                          self.bucket_util.buckets[0].name)
            load_task[server] = \
                self.bucket_util.run_scenario_from_spec(
                    self.task,
                    self.cluster,
                    self.bucket_util.buckets,
                    doc_load_spec,
                    batch_size=1,
                    validate_task=False)
            cb_err.revert(self.simulate_error,
                          self.bucket_util.buckets[0].name)
            ssh_shell.disconnect()
        self.validate_test_failure()

        # Aborts must leave the vbucket-details counters untouched
        failed = self.durability_helper.verify_vbucket_details_stats(
            self.bucket, kv_nodes,
            vbuckets=self.cluster_util.vbuckets,
            expected_val=self.verification_dict)
        if failed:
            self.log_failure("Cbstat vbucket-details verification failed "
                             "after aborts")
        self.validate_test_failure()

        # Retry aborted keys with healthy cluster
        self.log.info("Performing CRUDs on healthy cluster")
        for server in kv_nodes:
            self.bucket_util.validate_doc_loading_results(
                load_task[server])
            if load_task[server].result is False:
                self.log_failure("Doc retry task failed on %s" % server.ip)

            # Update cbstat vb-details verification counters
            for bucket, s_dict in load_task[server].loader_spec.items():
                for s_name, c_dict in s_dict["scopes"].items():
                    for c_name, _ in c_dict["collections"].items():
                        c_crud_data = load_task[server].loader_spec[
                            bucket]["scopes"][
                            s_name]["collections"][c_name]
                        for op_type in c_crud_data.keys():
                            total_mutation = \
                                c_crud_data[op_type]["doc_gen"].end \
                                - c_crud_data[op_type]["doc_gen"].start
                            if op_type in DocLoading.Bucket.DOC_OPS:
                                self.verification_dict["ops_%s" % op_type] \
                                    += total_mutation
                                self.verification_dict[
                                    "sync_write_committed_count"] \
                                    += total_mutation

            failed = self.durability_helper.verify_vbucket_details_stats(
                self.bucket, self.cluster_util.get_kv_nodes(),
                vbuckets=self.cluster_util.vbuckets,
                expected_val=self.verification_dict)
            if failed:
                self.log_failure("Cbstat vbucket-details verification "
                                 "failed after ops on server: %s" % server.ip)
        self.validate_test_failure()
    def test_sync_write_in_progress(self):
        """Validate second-writer behaviour while a SyncWrite is in flight.

        With an error simulated on the target nodes, a first (async) load
        leaves SyncWrites pending; a second writer on the same keys must see
        sync_write_in_progress retry errors, and reads must return the
        pre-SyncWrite value (or KeyNotFound for pending CREATEs).
        ``doc_ops`` param format: "<first_op>;<second_op>".
        """
        doc_ops = self.input.param("doc_ops", "create;create").split(';')
        shell_conn = dict()
        cbstat_obj = dict()
        error_sim = dict()
        vb_info = dict()
        active_vbs = dict()
        replica_vbs = dict()
        sync_write_in_progress = \
            SDKException.RetryReason.KV_SYNC_WRITE_IN_PROGRESS

        # Override d_level, error_simulation type based on d_level
        self.__get_d_level_and_error_to_simulate()

        # Acquire SDK client from the pool for performing doc_ops locally
        client = self.sdk_client_pool.get_client_for_bucket(self.bucket)

        target_nodes = DurabilityHelper.getTargetNodes(self.cluster,
                                                       self.nodes_init,
                                                       self.num_nodes_affected)
        # NOTE(review): these shell connections are never disconnected in this
        # test — looks like a leaked-session oversight; confirm.
        for node in target_nodes:
            shell_conn[node.ip] = RemoteMachineShellConnection(node)
            cbstat_obj[node.ip] = Cbstats(shell_conn[node.ip])
            vb_info["init"] = dict()
            vb_info["init"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(
                self.bucket.name)
            error_sim[node.ip] = CouchbaseError(self.log, shell_conn[node.ip])
            # Fetch affected nodes' vb_num which are of type=replica
            active_vbs[node.ip] = cbstat_obj[node.ip].vbucket_list(
                self.bucket.name, vbucket_type="active")
            replica_vbs[node.ip] = cbstat_obj[node.ip].vbucket_list(
                self.bucket.name, vbucket_type="replica")

        # Pick vbuckets hosted (as the relevant type) on every target node
        if self.durability_level \
                == Bucket.DurabilityLevel.MAJORITY_AND_PERSIST_TO_ACTIVE:
            target_vbs = active_vbs
            target_vbuckets = list()
            for target_node in target_nodes:
                target_vbuckets += target_vbs[target_node.ip]
        else:
            target_vbuckets = replica_vbs[target_nodes[0].ip]
            if len(target_nodes) > 1:
                index = 1
                while index < len(target_nodes):
                    target_vbuckets = list(
                        set(target_vbuckets).intersection(
                            set(replica_vbs[target_nodes[index].ip])
                        )
                    )
                    index += 1

        doc_load_spec = dict()
        doc_load_spec["doc_crud"] = dict()
        doc_load_spec["doc_crud"][MetaCrudParams.DocCrud.COMMON_DOC_KEY] \
            = "test_collections"
        doc_load_spec[MetaCrudParams.TARGET_VBUCKETS] = target_vbuckets
        doc_load_spec[MetaCrudParams.COLLECTIONS_CONSIDERED_FOR_CRUD] = 5
        doc_load_spec[MetaCrudParams.SCOPES_CONSIDERED_FOR_CRUD] = "all"
        doc_load_spec[MetaCrudParams.DURABILITY_LEVEL] = self.durability_level
        doc_load_spec[MetaCrudParams.SDK_TIMEOUT] = 60

        if doc_ops[0] == "create":
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 1
        elif doc_ops[0] == "update":
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 1
        elif doc_ops[0] == "replace":
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.REPLACE_PERCENTAGE_PER_COLLECTION] = 1
        elif doc_ops[0] == "delete":
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 1

        # Induce error condition for testing
        for node in target_nodes:
            error_sim[node.ip].create(self.simulate_error,
                                      bucket_name=self.bucket.name)
        self.sleep(3, "Wait for error simulation to take effect")

        doc_loading_task = \
            self.bucket_util.run_scenario_from_spec(
                self.task,
                self.cluster,
                self.bucket_util.buckets,
                doc_load_spec,
                async_load=True)
        self.sleep(5, "Wait for doc ops to reach server")

        # Second writer: hit each pending key once with fail_fast on and off,
        # and verify the exception / retry-reason returned by the SDK
        for bucket, s_dict in doc_loading_task.loader_spec.items():
            for s_name, c_dict in s_dict["scopes"].items():
                for c_name, c_meta in c_dict["collections"].items():
                    client.select_collection(s_name, c_name)
                    self.log.info("%s::%s" % (s_name, c_name))
                    for op_type in c_meta:
                        key, value = c_meta[op_type]["doc_gen"].next()
                        for fail_fast in [True, False]:
                            if self.with_non_sync_writes:
                                fail = client.crud(
                                    doc_ops[1], key, value,
                                    exp=0, timeout=2, time_unit="seconds",
                                    fail_fast=fail_fast)
                            else:
                                fail = client.crud(
                                    doc_ops[1], key, value,
                                    exp=0,
                                    durability=self.durability_level,
                                    timeout=2, time_unit="seconds",
                                    fail_fast=fail_fast)
                            expected_exception = \
                                SDKException.AmbiguousTimeoutException
                            retry_reason = \
                                SDKException \
                                .RetryReason \
                                .KV_SYNC_WRITE_IN_PROGRESS
                            if fail_fast:
                                expected_exception = \
                                    SDKException.RequestCanceledException
                                retry_reason = sync_write_in_progress
                            if doc_ops[0] == "create" \
                                    and doc_ops[1] in ["delete", "replace"]:
                                expected_exception = \
                                    SDKException.DocumentNotFoundException
                                retry_reason = None

                            # Validate the returned error from the SDK
                            if expected_exception not in str(fail["error"]):
                                self.log_failure("Invalid exception for %s: %s"
                                                 % (key, fail["error"]))
                            if retry_reason \
                                    and retry_reason not in str(fail["error"]):
                                self.log_failure(
                                    "Invalid retry reason for %s: %s"
                                    % (key, fail["error"]))

                            # Try reading the value in SyncWrite state
                            fail = client.crud("read", key)
                            if doc_ops[0] == "create":
                                # Expected KeyNotFound in case of CREATE op
                                if fail["status"] is True:
                                    self.log_failure(
                                        "%s returned value during SyncWrite %s"
                                        % (key, fail))
                            else:
                                # Expects prev val in case of other operations
                                if fail["status"] is False:
                                    self.log_failure(
                                        "Key %s read failed for prev value: %s"
                                        % (key, fail))

        # Revert the introduced error condition
        for node in target_nodes:
            error_sim[node.ip].revert(self.simulate_error,
                                      bucket_name=self.bucket.name)

        # Wait for doc_loading to complete
        self.task_manager.get_task_result(doc_loading_task)
        self.bucket_util.validate_doc_loading_results(doc_loading_task)
        if doc_loading_task.result is False:
            self.log_failure("Doc CRUDs failed")

        # Release the acquired SDK client
        self.sdk_client_pool.release_client(client)
        self.validate_test_failure()
    def test_bulk_sync_write_in_progress(self):
        """Bulk variant of the sync_write_in_progress scenario.

        While an async load keeps SyncWrites pending on the affected
        vbuckets, a second bulk loader is run over the same doc generators
        and must fail with sync_write_in_progress retry errors.
        ``doc_ops`` param format: "<first_op>;<second_op>".
        """
        doc_ops = self.input.param("doc_ops").split(';')
        shell_conn = dict()
        cbstat_obj = dict()
        error_sim = dict()
        vb_info = dict()
        active_vbs = dict()
        replica_vbs = dict()
        sync_write_in_progress = \
            SDKException.RetryReason.KV_SYNC_WRITE_IN_PROGRESS

        # Override d_level, error_simulation type based on d_level
        self.__get_d_level_and_error_to_simulate()

        target_nodes = DurabilityHelper.getTargetNodes(self.cluster,
                                                       self.nodes_init,
                                                       self.num_nodes_affected)
        for node in target_nodes:
            shell_conn[node.ip] = RemoteMachineShellConnection(node)
            cbstat_obj[node.ip] = Cbstats(shell_conn[node.ip])
            vb_info["init"] = dict()
            vb_info["init"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(
                self.bucket.name)
            error_sim[node.ip] = CouchbaseError(self.log, shell_conn[node.ip])
            # Fetch affected nodes' vb_num which are of type=replica
            active_vbs[node.ip] = cbstat_obj[node.ip].vbucket_list(
                self.bucket.name, vbucket_type="active")
            replica_vbs[node.ip] = cbstat_obj[node.ip].vbucket_list(
                self.bucket.name, vbucket_type="replica")

        # Pick vbuckets hosted (as the relevant type) on every target node
        target_vbs = replica_vbs
        if self.durability_level \
                == Bucket.DurabilityLevel.MAJORITY_AND_PERSIST_TO_ACTIVE:
            target_vbs = active_vbs
            target_vbuckets = list()
            for target_node in target_nodes:
                target_vbuckets += target_vbs[target_node.ip]
        else:
            target_vbuckets = target_vbs[target_nodes[0].ip]
            if len(target_nodes) > 1:
                index = 1
                while index < len(target_nodes):
                    target_vbuckets = list(
                        set(target_vbuckets).intersection(
                            set(target_vbs[target_nodes[index].ip])
                        )
                    )
                    index += 1

        doc_load_spec = dict()
        doc_load_spec["doc_crud"] = dict()
        doc_load_spec[MetaCrudParams.TARGET_VBUCKETS] = target_vbuckets
        doc_load_spec[MetaCrudParams.DURABILITY_LEVEL] = self.durability_level
        doc_load_spec[MetaCrudParams.COLLECTIONS_CONSIDERED_FOR_CRUD] = 5
        doc_load_spec[MetaCrudParams.SCOPES_CONSIDERED_FOR_CRUD] = "all"
        doc_load_spec[MetaCrudParams.SDK_TIMEOUT] = 60
        doc_load_spec["doc_crud"][MetaCrudParams.DocCrud.COMMON_DOC_KEY] \
            = "test_collections"

        if doc_ops[0] == "create":
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 1
        elif doc_ops[0] == "update":
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 1
        elif doc_ops[0] == "replace":
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.REPLACE_PERCENTAGE_PER_COLLECTION] = 1
        elif doc_ops[0] == "delete":
            doc_load_spec["doc_crud"][
                MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 1

        # Induce error condition for testing
        for node in target_nodes:
            error_sim[node.ip].create(self.simulate_error,
                                      bucket_name=self.bucket.name)

        doc_loading_task = \
            self.bucket_util.run_scenario_from_spec(
                self.task,
                self.cluster,
                self.bucket_util.buckets,
                doc_load_spec,
                async_load=True)
        self.sleep(5, "Wait for doc ops to reach server")

        tem_durability = self.durability_level
        if self.with_non_sync_writes:
            tem_durability = "NONE"

        # Second bulk loader on the same generators; expect exactly one
        # failed doc per run with a sync_write_in_progress retry reason
        for bucket, s_dict in doc_loading_task.loader_spec.items():
            for s_name, c_dict in s_dict["scopes"].items():
                for c_name, c_meta in c_dict["collections"].items():
                    for op_type in c_meta:
                        # This will support both sync-write and non-sync-writes
                        doc_loader_task_2 = self.task.async_load_gen_docs(
                            self.cluster, self.bucket,
                            c_meta[op_type]["doc_gen"], doc_ops[1], 0,
                            scope=s_name, collection=c_name,
                            sdk_client_pool=self.sdk_client_pool,
                            batch_size=self.crud_batch_size,
                            process_concurrency=1,
                            replicate_to=self.replicate_to,
                            persist_to=self.persist_to,
                            durability=tem_durability, timeout_secs=3,
                            print_ops_rate=False,
                            skip_read_on_error=True,
                            task_identifier="parallel_task2")
                        self.task.jython_task_manager.get_task_result(
                            doc_loader_task_2)

                        # Validation to verify the sync_in_write_errors
                        # in doc_loader_task_2
                        failed_docs = doc_loader_task_2.fail
                        if len(failed_docs.keys()) != 1:
                            self.log_failure("Exception not seen for docs: %s"
                                             % failed_docs)

                        valid_exception = self.durability_helper\
                            .validate_durability_exception(
                                failed_docs,
                                SDKException.AmbiguousTimeoutException,
                                retry_reason=sync_write_in_progress)
                        if not valid_exception:
                            self.log_failure("Got invalid exception")

        # Revert the introduced error condition
        for node in target_nodes:
            error_sim[node.ip].revert(self.simulate_error,
                                      bucket_name=self.bucket.name)

        # Wait for doc_loading to complete
        self.task_manager.get_task_result(doc_loading_task)
        self.bucket_util.validate_doc_loading_results(doc_loading_task)
        if doc_loading_task.result is False:
            self.log_failure("Doc CRUDs failed")

        # Validate docs for update success or not
        if doc_ops[0] == "update":
            for bucket, s_dict in doc_loading_task.loader_spec.items():
                for s_name, c_dict in s_dict["scopes"].items():
                    for c_name, c_meta in c_dict["collections"].items():
                        for op_type in c_meta:
                            read_task = self.task.async_load_gen_docs(
                                self.cluster, self.bucket,
                                c_meta[op_type]["doc_gen"], "read",
                                batch_size=self.crud_batch_size,
                                process_concurrency=1,
                                timeout_secs=self.sdk_timeout)
                            self.task_manager.get_task_result(read_task)
                            for key, doc_info in read_task.success.items():
                                # mutated==1 proves the first update landed
                                if doc_info["cas"] != 0 \
                                        and json.loads(str(doc_info["value"])
                                                       )["mutated"] != 1:
                                    self.log_failure(
                                        "Update failed for key %s: %s"
                                        % (key, doc_info))

        # Validate doc_count per collection
        self.validate_test_failure()
        self.bucket_util.validate_docs_per_collections_all_buckets()
def test_sub_doc_sync_write_in_progress(self):
"""
Test to simulate sync_write_in_progress error and validate the behavior
This will validate failure in majority of nodes, where durability will
surely fail for all CRUDs
1. Select nodes to simulate the error which will affect the durability
2. Enable the specified error_scenario on the selected nodes
3. Perform individual CRUDs and verify sync_write_in_progress errors
4. Validate the end results
"""
doc_ops = self.input.param("doc_ops", "insert")
shell_conn = dict()
cbstat_obj = dict()
error_sim = dict()
vb_info = dict()
active_vbs = dict()
replica_vbs = dict()
vb_info["init"] = dict()
doc_load_spec = dict()
# Override d_level, error_simulation type based on d_level
self.__get_d_level_and_error_to_simulate()
target_nodes = DurabilityHelper.getTargetNodes(self.cluster,
self.nodes_init,
self.num_nodes_affected)
for node in target_nodes:
shell_conn[node.ip] = RemoteMachineShellConnection(node)
cbstat_obj[node.ip] = Cbstats(shell_conn[node.ip])
vb_info["init"] = dict()
vb_info["init"][node.ip] = cbstat_obj[node.ip].vbucket_seqno(
self.bucket.name)
error_sim[node.ip] = CouchbaseError(self.log, shell_conn[node.ip])
# Fetch affected nodes' vb_num which are of type=replica
active_vbs[node.ip] = cbstat_obj[node.ip].vbucket_list(
self.bucket.name, vbucket_type="active")
replica_vbs[node.ip] = cbstat_obj[node.ip].vbucket_list(
self.bucket.name, vbucket_type="replica")
target_vbs = replica_vbs
if self.durability_level \
== Bucket.DurabilityLevel.MAJORITY_AND_PERSIST_TO_ACTIVE:
target_vbs = active_vbs
target_vbuckets = list()
for target_node in target_nodes:
target_vbuckets += target_vbs[target_node.ip]
else:
target_vbuckets = target_vbs[target_nodes[0].ip]
if len(target_nodes) > 1:
index = 1
while index < len(target_nodes):
target_vbuckets = list(
set(target_vbuckets).intersection(
set(target_vbs[target_nodes[index].ip])
)
)
index += 1
amb_timeout = SDKException.AmbiguousTimeoutException
kv_sync_write_in_progress = \
SDKException.RetryReason.KV_SYNC_WRITE_IN_PROGRESS
doc_not_found_exception = SDKException.DocumentNotFoundException
self.load_data_for_sub_doc_ops()
doc_load_spec["doc_crud"] = dict()
doc_load_spec["subdoc_crud"] = dict()
doc_load_spec["doc_crud"][MetaCrudParams.DocCrud.COMMON_DOC_KEY] \
= "test_collections"
doc_load_spec[MetaCrudParams.TARGET_VBUCKETS] = target_vbuckets
doc_load_spec[MetaCrudParams.DURABILITY_LEVEL] = self.durability_level
doc_load_spec[MetaCrudParams.COLLECTIONS_CONSIDERED_FOR_CRUD] = 5
doc_load_spec[MetaCrudParams.SCOPES_CONSIDERED_FOR_CRUD] = "all"
doc_load_spec[MetaCrudParams.SDK_TIMEOUT] = 60
# Acquire SDK client from the pool for performing doc_ops locally
client = self.sdk_client_pool.get_client_for_bucket(self.bucket)
# Override the crud_batch_size
self.crud_batch_size = 5
# Update mutation spec based on the required doc_operation
if doc_ops == "create":
doc_load_spec["doc_crud"][
MetaCrudParams.DocCrud.CREATE_PERCENTAGE_PER_COLLECTION] = 1
elif doc_ops in "update":
doc_load_spec["doc_crud"][
MetaCrudParams.DocCrud.UPDATE_PERCENTAGE_PER_COLLECTION] = 1
elif doc_ops == "delete":
doc_load_spec["doc_crud"][
MetaCrudParams.DocCrud.DELETE_PERCENTAGE_PER_COLLECTION] = 1
elif doc_ops == "insert":
doc_load_spec["subdoc_crud"][
MetaCrudParams.SubDocCrud.INSERT_PER_COLLECTION] = 1
elif doc_ops == "upsert":
doc_load_spec["subdoc_crud"][
MetaCrudParams.SubDocCrud.UPSERT_PER_COLLECTION] = 1
elif doc_ops == "remove":
doc_load_spec["subdoc_crud"][
MetaCrudParams.SubDocCrud.REMOVE_PER_COLLECTION] = 1
# This is to support both sync-write and non-sync-writes
tem_durability = self.durability_level
if self.with_non_sync_writes:
tem_durability = "NONE"
# Perform specified action
for node in target_nodes:
error_sim[node.ip].create(self.simulate_error,
bucket_name=self.bucket.name)
self.sleep(5, "Wait for error simulation to take effect")
# Initialize tasks and store the task objects
doc_loading_task = \
self.bucket_util.run_scenario_from_spec(
self.task,
self.cluster,
self.bucket_util.buckets,
doc_load_spec,
mutation_num=2,
batch_size=1,
async_load=True)
# Start the doc_loader_task
self.sleep(10, "Wait for task_1 CRUDs to reach server")
for bucket, s_dict in doc_loading_task.loader_spec.items():
for s_name, c_dict in s_dict["scopes"].items():
for c_name, c_meta in c_dict["collections"].items():
for op_type in c_meta:
key, _ = c_meta[op_type]["doc_gen"].next()
expected_exception = amb_timeout
retry_reason = kv_sync_write_in_progress
if doc_ops == "create":
expected_exception = doc_not_found_exception
retry_reason = None
for sub_doc_op in [
DocLoading.Bucket.SubDocOps.INSERT,
DocLoading.Bucket.SubDocOps.UPSERT,
DocLoading.Bucket.SubDocOps.REMOVE]:
val = ["my_mutation", "val"]
if sub_doc_op \
== DocLoading.Bucket.SubDocOps.REMOVE:
val = "mutated"
result = client.crud(
sub_doc_op, key, val,
durability=tem_durability,
timeout=2)
if result[0]:
self.log_failure(
"Doc crud succeeded for %s" % op_type)
elif expected_exception \
not in str(result[1][key]["error"]):
self.log_failure(
"Invalid exception for key %s: %s"
% (key, result[1][key]["error"]))
elif retry_reason is not None and \
retry_reason \
not in str(result[1][key]["error"]):
self.log_failure(
"Retry reason missing for key %s: %s"
% (key, result[1][key]["error"]))
# Revert the introduced error condition
for node in target_nodes:
error_sim[node.ip].revert(self.simulate_error,
bucket_name=self.bucket.name)
# Wait for doc_loader_task_1 to complete
self.task.jython_task_manager.get_task_result(doc_loading_task)
self.bucket_util.validate_doc_loading_results(doc_loading_task)
if doc_loading_task.result is False:
self.log_failure("Doc CRUDs failed")
# Validate docs for update success or not
if doc_ops == "update":
for bucket, s_dict in doc_loading_task.loader_spec.items():
for s_name, c_dict in s_dict["scopes"].items():
for c_name, c_meta in c_dict["collections"].items():
for op_type in c_meta:
c_meta[op_type]["doc_gen"].reset()
read_task = self.task.async_load_gen_docs(
self.cluster, self.bucket,
c_meta[op_type]["doc_gen"], "read",
batch_size=self.crud_batch_size,
process_concurrency=1,
timeout_secs=self.sdk_timeout)
self.task_manager.get_task_result(read_task)
for key, doc_info in read_task.success.items():
if doc_info["cas"] != 0 and \
json.loads(str(doc_info["value"])
)["mutated"] != 2:
self.log_failure(
"Update failed for key %s: %s"
% (key, doc_info))
# Release the acquired SDK client
self.sdk_client_pool.release_client(client)
# Verify initial doc load count
self.bucket_util._wait_for_stats_all_buckets()
self.bucket_util.validate_docs_per_collections_all_buckets()
self.validate_test_failure()
|
antonio-mastropaolo/labeling-machine | data/cloned/1c-syntax/bsl-language-server/src/main/java/com/github/_1c_syntax/bsl/languageserver/diagnostics/PublicMethodsDescriptionDiagnostic.java | <reponame>antonio-mastropaolo/labeling-machine<filename>data/cloned/1c-syntax/bsl-language-server/src/main/java/com/github/_1c_syntax/bsl/languageserver/diagnostics/PublicMethodsDescriptionDiagnostic.java
/*
* This file is a part of BSL Language Server.
*
* Copyright (c) 2018-2021
* <NAME> <<EMAIL>>, <NAME> <<EMAIL>> and contributors
*
* SPDX-License-Identifier: LGPL-3.0-or-later
*
* BSL Language Server is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3.0 of the License, or (at your option) any later version.
*
* BSL Language Server is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BSL Language Server.
*/
package com.github._1c_syntax.bsl.languageserver.diagnostics;
import com.github._1c_syntax.bsl.languageserver.context.symbol.MethodSymbol;
import com.github._1c_syntax.bsl.languageserver.context.symbol.Symbol;
import com.github._1c_syntax.bsl.languageserver.diagnostics.metadata.DiagnosticMetadata;
import com.github._1c_syntax.bsl.languageserver.diagnostics.metadata.DiagnosticParameter;
import com.github._1c_syntax.bsl.languageserver.diagnostics.metadata.DiagnosticSeverity;
import com.github._1c_syntax.bsl.languageserver.diagnostics.metadata.DiagnosticTag;
import com.github._1c_syntax.bsl.languageserver.diagnostics.metadata.DiagnosticType;
import com.github._1c_syntax.bsl.parser.BSLParser;
import com.github._1c_syntax.utils.CaseInsensitivePattern;
import org.antlr.v4.runtime.tree.ParseTree;
import org.eclipse.lsp4j.SymbolKind;
import java.util.regex.Pattern;
@DiagnosticMetadata(
type = DiagnosticType.CODE_SMELL,
severity = DiagnosticSeverity.INFO,
minutesToFix = 1,
tags = {
DiagnosticTag.STANDARD,
DiagnosticTag.BRAINOVERLOAD,
DiagnosticTag.BADPRACTICE
}
)
public class PublicMethodsDescriptionDiagnostic extends AbstractVisitorDiagnostic {
private static final Pattern API_REGION_NAME = CaseInsensitivePattern.compile(
"^(?:ПрограммныйИнтерфейс|Public)$"
);
private static final boolean DEFAULT_CHECK_ALL_REGION = false;
@DiagnosticParameter(
type = Boolean.class,
defaultValue = "" + DEFAULT_CHECK_ALL_REGION
)
private boolean checkAllRegion = DEFAULT_CHECK_ALL_REGION;
@Override
public ParseTree visitSub(BSLParser.SubContext ctx) {
documentContext.getSymbolTree().getMethodSymbol(ctx).ifPresent((MethodSymbol methodSymbol) -> {
if (methodSymbol.isExport() && methodSymbol.getDescription().isEmpty()) {
if (checkAllRegion) {
diagnosticStorage.addDiagnostic(methodSymbol.getSubNameRange());
} else {
methodSymbol.getRootParent(SymbolKind.Namespace).ifPresent((Symbol rootRegion) -> {
if (isAPIRegion(rootRegion)) {
diagnosticStorage.addDiagnostic(methodSymbol.getSubNameRange());
}
});
}
}
});
return ctx;
}
private static boolean isAPIRegion(Symbol symbol) {
return API_REGION_NAME.matcher(symbol.getName()).matches();
}
}
|
jesperancinha/jeorg-java-11-test-drives | jeorg-java-11-mastery/jeorg-java-11-mastery-4/jeorg-mastery-4-1/src/main/java/org/jesperancinha/java11/mastery4dot1/states/JFKGovernment.java | package org.jesperancinha.java11.mastery4dot1.states;
public class JFKGovernment implements HappyBirthday {
public boolean civilRightsBill = false;
}
|
nateglims/amazon-freertos | vendors/microchip/harmony/v2.05/framework/driver/spi/src/drv_spi_variant_mapping.h | /*******************************************************************************
SPI Driver Feature Variant Implementations
Company:
Microchip Technology Inc.
File Name:
drv_spi_variant_mapping.h
Summary:
SPI driver feature variant implementations.
Description:
This file implements the functions which differ based on different devices
and various implementations of the same feature.
*******************************************************************************/
//DOM-IGNORE-BEGIN
/*******************************************************************************
Copyright (c) 2013 released Microchip Technology Inc. All rights reserved.
Microchip licenses to you the right to use, modify, copy and distribute
Software only when embedded on a Microchip microcontroller or digital signal
controller that is integrated into your product or third party product
(pursuant to the sub-license terms in the accompanying license agreement).
You should refer to the license agreement accompanying this Software for
additional information regarding your rights and obligations.
SOFTWARE AND DOCUMENTATION ARE PROVIDED AS IS WITHOUT WARRANTY OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION, ANY WARRANTY OF
MERCHANTABILITY, TITLE, NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
IN NO EVENT SHALL MICROCHIP OR ITS LICENSORS BE LIABLE OR OBLIGATED UNDER
CONTRACT, NEGLIGENCE, STRICT LIABILITY, CONTRIBUTION, BREACH OF WARRANTY, OR
OTHER LEGAL EQUITABLE THEORY ANY DIRECT OR INDIRECT DAMAGES OR EXPENSES
INCLUDING BUT NOT LIMITED TO ANY INCIDENTAL, SPECIAL, INDIRECT, PUNITIVE OR
CONSEQUENTIAL DAMAGES, LOST PROFITS OR LOST DATA, COST OF PROCUREMENT OF
SUBSTITUTE GOODS, TECHNOLOGY, SERVICES, OR ANY CLAIMS BY THIRD PARTIES
(INCLUDING BUT NOT LIMITED TO ANY DEFENSE THEREOF), OR OTHER SIMILAR COSTS.
*******************************************************************************/
//DOM-IGNORE-END
#ifndef _DRV_SPI_VARIANT_MAPPING_H
#define _DRV_SPI_VARIANT_MAPPING_H
#if defined __PIC32MX__
/* Interrupt trigger for PIC32MX320 series of devices, which doesn't have persistent interrupt */
#if ((__PIC32_FEATURE_SET == 320) || (__PIC32_FEATURE_SET == 340) || (__PIC32_FEATURE_SET == 360) || (__PIC32_FEATURE_SET == 420) || (__PIC32_FEATURE_SET == 440) || (__PIC32_FEATURE_SET == 460))
#define _DRV_SPI_INTERRUPT_TRIGGER(txInterruptSource) if (qEmpty)\
{\
SYS_INT_SourceStatusSet(txInterruptSource);\
}
#define _DRV_SPI_QUEUE_STATUS_CHECK(queue) volatile bool qEmpty;\
if (DRV_SPI_SYS_QUEUE_IsEmpty(queue))\
{\
qEmpty= true;\
}
/* No interrupt trigger is required for the devices which have persistent interrupts */
#else
#define _DRV_SPI_INTERRUPT_TRIGGER(txInterruptSource)
#define _DRV_SPI_QUEUE_STATUS_CHECK(queue)
#endif
#else
#define _DRV_SPI_INTERRUPT_TRIGGER(txInterruptSource)
#define _DRV_SPI_QUEUE_STATUS_CHECK(queue)
#endif
#endif //_DRV_SPI_VARIANT_MAPPING_H
/*******************************************************************************
End of File
*/
|
razerx100/Terra | includes/VK/CommandPoolManager.hpp | #ifndef COMMAND_POOL_MANAGER_HPP_
#define COMMAND_POOL_MANAGER_HPP_
#include <vulkan/vulkan.hpp>
#include <vector>
class CommandPoolManager {
public:
CommandPoolManager(
VkDevice device, size_t queueIndex, std::uint32_t bufferCount
);
~CommandPoolManager() noexcept;
void Reset(size_t bufferIndex);
void Close(size_t bufferIndex);
[[nodiscard]]
VkCommandBuffer GetCommandBuffer(size_t bufferIndex) const noexcept;
private:
VkDevice m_deviceRef;
VkCommandPool m_commandPool;
std::vector<VkCommandBuffer> m_commandBuffers;
VkCommandBufferBeginInfo m_beginInfo;
};
#endif
|
lo-rodriguez/dojogroup | src/main/java/com/dojogrouppty/catalogs/SystemCodesRepository.java | package com.dojogrouppty.catalogs;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
@Repository
public interface SystemCodesRepository extends JpaRepository<SystemCodes, Long> {
@Query(nativeQuery = true, value="select idsystem_codes,system_code,system_cod_description,system_cod_group,sub_group_description from system_codes where system_cod_group=:group")
List <SystemCodes> getSystemCodesByGroup(@Param("group") String group);
@Query(nativeQuery = true, value="select CONCAT('PROD',LPAD(COUNT(1)+1,6,'0')) from system_codes where system_cod_group='PRODUCTS'AND system_code LIKE 'PROD%'")
String getSecuenceProduct();
@Query(nativeQuery = true, value="select CONCAT(SUBSTRING(:subGroup,1,4),LPAD(COUNT(1)+1,6,'0')) \r\n" +
"from system_codes \r\n" +
"where system_cod_group='PRODUCTS'\r\n" +
"AND system_code LIKE CONCAT(SUBSTRING(:subGroup,1,4),'%');")
String getSecuenceProductBySubGroup(@Param("subGroup")String subGroup);
} |
NormanDunbar/xtc68 | support/INCLUDE_libgen.h | <filename>support/INCLUDE_libgen.h
#ifndef _LIBGEN_H
#define _LIBGEN_H
#ifndef _SYS_TYPES_H
#include <sys/types.h>
#endif
#ifndef _STDIO_H
#include <stdio.h>
#endif
#ifdef __STDC__
#define _P_(params) params
#else
#define _P_(params)
#endif
#define basename _BaseName
#define bgets _Bgets
#define bufsplit _Bufsplit
#define copylist _Copylist
#define dirname _Dirname
#define gmatch _Gmatch
#define pathfind _Pathfind
#define strccpy _Strccpy
#define strcadd _Strcadd
#define strecpy _Strecpy
#define streadd _Streadd
#define stresize _Stresize
#define strfind _Strfind
#define strrspn _Strrspn
#define strtrns _Strtrns
char * basename _P_((char *));
char * bgets _P_((char *,size_t,FILE *,const char *));
size_t bufsplit _P_((char *,size_t,char **));
char * copylist _P_((const char *,size_t *));
char * dirname _P_((char *));
int gmatch _P_((const char *,const char *));
char * pathfind _P_((const char *,const char *,const char *));
char * strccpy _P_((char *,const char *));
char * strcadd _P_((char *,const char *));
char * strecpy _P_((char *,const char *,const char *));
char * streadd _P_((char *,const char *,const char *));
int stresize _P_((const char *,const char *));
int strfind _P_((const char *,const char *));
char * strrspn _P_((const char *,const char *));
char * strtrns _P_((const char *,const char *,const char *,char *));
#if 0
#define isencrypt _Isencrypt
int isencrypt _P_((const char *,size_t));
#define mkdirp _Mkdirp
int mkdirp _P_((const char *,mode_t));
#define rmdirp _Rmdirp
int rmdirp _P_((char *,char *));
#define p2open _P2open
int p2open _P_((const char *,FILE **));
#define p2close _P2close
int p2close _P_((FILE **));
#define regcmp _Regcmp
char * regcmp _P_((const char *,char *,...));
#define regex _Regex
char * regex _P_((const char *,const char *,...));
#endif
#ifdef __LIBRARY__
int _CescInt _P_((int));
int _HexInt _P_((int));
int _OctInt _P_((int));
int _IntCesc _P_((int));
char * __Streadd _P_((char *,const char *,const char *,const char *));
int __Stresize _P_((const char *,const char *,const char * ));
#endif
#undef _P_
#endif
|
jblancett/tsuru | iaas/ec2/iaas.go | <reponame>jblancett/tsuru
// Copyright 2015 tsuru authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ec2
import (
"fmt"
"reflect"
"strconv"
"strings"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/service/ec2"
"github.com/tsuru/monsterqueue"
"github.com/tsuru/tsuru/iaas"
"github.com/tsuru/tsuru/log"
"github.com/tsuru/tsuru/queue"
)
const defaultRegion = "us-east-1"
func init() {
iaas.RegisterIaasProvider("ec2", newEC2IaaS)
}
type EC2IaaS struct {
base iaas.UserDataIaaS
}
func newEC2IaaS(name string) iaas.IaaS {
return &EC2IaaS{base: iaas.UserDataIaaS{NamedIaaS: iaas.NamedIaaS{BaseIaaSName: "ec2", IaaSName: name}}}
}
func (i *EC2IaaS) createEC2Handler(regionOrEndpoint string) (*ec2.EC2, error) {
keyId, err := i.base.GetConfigString("key-id")
if err != nil {
return nil, err
}
secretKey, err := i.base.GetConfigString("secret-key")
if err != nil {
return nil, err
}
var region, endpoint string
if strings.HasPrefix(regionOrEndpoint, "http") {
endpoint = regionOrEndpoint
region = defaultRegion
} else {
region = regionOrEndpoint
}
config := aws.Config{
Credentials: credentials.NewStaticCredentials(keyId, secretKey, ""),
Region: region,
Endpoint: endpoint,
}
return ec2.New(&config), nil
}
func (i *EC2IaaS) waitForDnsName(ec2Inst *ec2.EC2, instance *ec2.Instance) (*ec2.Instance, error) {
rawWait, _ := i.base.GetConfigString("wait-timeout")
maxWaitTime, _ := strconv.Atoi(rawWait)
if maxWaitTime == 0 {
maxWaitTime = 300
}
q, err := queue.Queue()
if err != nil {
return nil, err
}
taskName := fmt.Sprintf("ec2-wait-machine-%s", i.base.IaaSName)
waitDuration := time.Duration(maxWaitTime) * time.Second
job, err := q.EnqueueWait(taskName, monsterqueue.JobParams{
"region": ec2Inst.Config.Region,
"endpoint": ec2Inst.Config.Endpoint,
"machineId": *instance.InstanceID,
"timeout": maxWaitTime,
}, waitDuration)
if err != nil {
if err == monsterqueue.ErrQueueWaitTimeout {
return nil, fmt.Errorf("ec2: time out after %v waiting for instance %s to start", waitDuration, *instance.InstanceID)
}
return nil, err
}
result, err := job.Result()
if err != nil {
return nil, err
}
instance.PublicDNSName = aws.String(result.(string))
return instance, nil
}
func (i *EC2IaaS) Initialize() error {
q, err := queue.Queue()
if err != nil {
return err
}
return q.RegisterTask(&ec2WaitTask{iaas: i})
}
func (i *EC2IaaS) Describe() string {
return `EC2 IaaS required params:
image=<image id> Image AMI ID
type=<instance type> Your template uuid
Optional params:
region=<region> Chosen region, defaults to us-east-1
securityGroup=<group> Chosen security group
keyName=<key name> Key name for machine
`
}
func (i *EC2IaaS) DeleteMachine(m *iaas.Machine) error {
regionOrEndpoint := getRegionOrEndpoint(m.CreationParams, false)
if regionOrEndpoint == "" {
return fmt.Errorf("region or endpoint creation param required")
}
ec2Inst, err := i.createEC2Handler(regionOrEndpoint)
if err != nil {
return err
}
input := ec2.TerminateInstancesInput{InstanceIDs: []*string{&m.Id}}
_, err = ec2Inst.TerminateInstances(&input)
return err
}
type invalidFieldError struct {
fieldName string
convertError error
}
func (err *invalidFieldError) Error() string {
return fmt.Sprintf("invalid value for the field %q: %s", err.fieldName, err.convertError)
}
func (i *EC2IaaS) buildRunInstancesOptions(params map[string]string) (ec2.RunInstancesInput, error) {
result := ec2.RunInstancesInput{
MaxCount: aws.Long(1),
MinCount: aws.Long(1),
}
forbiddenFields := []string{
"maxcount", "mincount", "dryrun", "blockdevicemappings",
"iaminstanceprofile", "monitoring", "networkinterfaces",
"placement",
}
aliases := map[string]string{
"image": "imageid",
"type": "instancetype",
"securitygroup": "securitygroups",
"ebs-optimized": "ebsoptimized",
}
refType := reflect.TypeOf(result)
refValue := reflect.ValueOf(&result)
for key, value := range params {
field, ok := refType.FieldByNameFunc(func(name string) bool {
lowerName := strings.ToLower(name)
for _, field := range forbiddenFields {
if lowerName == field {
return false
}
}
lowerKey := strings.ToLower(key)
if aliased, ok := aliases[lowerKey]; ok {
lowerKey = aliased
}
return lowerName == lowerKey
})
if !ok {
continue
}
fieldType := field.Type
fieldValue := refValue.Elem().FieldByIndex(field.Index)
if !fieldValue.IsValid() || !fieldValue.CanSet() {
continue
}
switch fieldType.Kind() {
case reflect.Ptr:
switch fieldType.Elem().Kind() {
case reflect.String:
copy := value
fieldValue.Set(reflect.ValueOf(©))
case reflect.Int64:
intValue, err := strconv.ParseInt(value, 10, 64)
if err != nil {
return result, &invalidFieldError{
fieldName: key,
convertError: err,
}
}
fieldValue.Set(reflect.ValueOf(&intValue))
case reflect.Bool:
boolValue, err := strconv.ParseBool(value)
if err != nil {
return result, &invalidFieldError{
fieldName: key,
convertError: err,
}
}
fieldValue.Set(reflect.ValueOf(&boolValue))
}
case reflect.Slice:
parts := strings.Split(value, ",")
values := make([]*string, len(parts))
for i, part := range parts {
values[i] = aws.String(part)
}
fieldValue.Set(reflect.ValueOf(values))
}
}
return result, nil
}
func (i *EC2IaaS) CreateMachine(params map[string]string) (*iaas.Machine, error) {
regionOrEndpoint := getRegionOrEndpoint(params, true)
userData, err := i.base.ReadUserData()
if err != nil {
return nil, err
}
options, err := i.buildRunInstancesOptions(params)
if err != nil {
return nil, err
}
options.UserData = aws.String(userData)
if options.ImageID == nil || *options.ImageID == "" {
return nil, fmt.Errorf("the parameter %q is required", "imageid")
}
if options.InstanceType == nil || *options.InstanceType == "" {
return nil, fmt.Errorf("the parameter %q is required", "instancetype")
}
ec2Inst, err := i.createEC2Handler(regionOrEndpoint)
if err != nil {
return nil, err
}
resp, err := ec2Inst.RunInstances(&options)
if err != nil {
return nil, err
}
if len(resp.Instances) == 0 {
return nil, fmt.Errorf("no instance created")
}
runInst := resp.Instances[0]
if tags, ok := params["tags"]; ok {
var ec2Tags []*ec2.Tag
tagList := strings.Split(tags, ",")
ec2Tags = make([]*ec2.Tag, 0, len(tagList))
for _, tag := range tagList {
if strings.Contains(tag, ":") {
parts := strings.SplitN(tag, ":", 2)
ec2Tags = append(ec2Tags, &ec2.Tag{
Key: aws.String(parts[0]),
Value: aws.String(parts[1]),
})
}
}
if len(ec2Tags) > 0 {
input := ec2.CreateTagsInput{
Resources: []*string{runInst.InstanceID},
Tags: ec2Tags,
}
_, err = ec2Inst.CreateTags(&input)
if err != nil {
log.Errorf("failed to tag EC2 instance: %s", err)
}
}
}
instance, err := i.waitForDnsName(ec2Inst, runInst)
if err != nil {
return nil, err
}
machine := iaas.Machine{
Id: *instance.InstanceID,
Status: *instance.State.Name,
Address: *instance.PublicDNSName,
}
return &machine, nil
}
func getRegionOrEndpoint(params map[string]string, useDefault bool) string {
regionOrEndpoint := params["endpoint"]
if regionOrEndpoint == "" {
regionOrEndpoint = params["region"]
if regionOrEndpoint == "" && useDefault {
regionOrEndpoint = defaultRegion
}
}
return regionOrEndpoint
}
|
HELIX-GR/core | web/src/main/frontend/js/helix-core/service/index.js | <filename>web/src/main/frontend/js/helix-core/service/index.js
import { default as citation } from './citation';
import { default as i18n } from './i18n';
import { default as search } from './search';
import { default as user } from './user';
const api = {
citation,
i18n,
search,
user,
};
export default api;
|
Gei0r/cquery | index_tests/vars/deduce_auto_type.cc | <filename>index_tests/vars/deduce_auto_type.cc
class Foo {};
void f() {
auto x = new Foo();
auto* y = new Foo();
}
/*
OUTPUT:
{
"includes": [],
"skipped_by_preprocessor": [],
"types": [{
"id": 0,
"usr": 15041163540773201510,
"detailed_name": "Foo",
"short_name": "Foo",
"kind": 5,
"declarations": [],
"spell": "1:7-1:10|-1|1|2",
"extent": "1:1-1:13|-1|1|0",
"bases": [],
"derived": [],
"types": [],
"funcs": [],
"vars": [],
"instances": [0, 1],
"uses": ["3:16-3:19|-1|1|4", "4:17-4:20|-1|1|4"]
}],
"funcs": [{
"id": 0,
"usr": 880549676430489861,
"detailed_name": "void f()",
"short_name": "f",
"kind": 12,
"storage": 1,
"declarations": [],
"spell": "2:6-2:7|-1|1|2",
"extent": "2:1-5:2|-1|1|0",
"bases": [],
"derived": [],
"vars": [0, 1],
"uses": [],
"callees": []
}],
"vars": [{
"id": 0,
"usr": 9275666070987716270,
"detailed_name": "Foo *x",
"short_name": "x",
"declarations": [],
"spell": "3:8-3:9|0|3|2",
"extent": "3:3-3:21|0|3|0",
"type": 0,
"uses": [],
"kind": 13,
"storage": 1
}, {
"id": 1,
"usr": 16202433437488621027,
"detailed_name": "Foo *y",
"short_name": "y",
"declarations": [],
"spell": "4:9-4:10|0|3|2",
"extent": "4:3-4:22|0|3|0",
"type": 0,
"uses": [],
"kind": 13,
"storage": 1
}]
}
*/
|
solnic/rom-relation | spec/unit/rom/relation/sort_by_spec.rb | # encoding: utf-8
require 'spec_helper'
describe Relation, '#sort_by' do
include_context 'Relation'
share_examples_for 'sorted relation' do
specify do
should eql([jack, jade, jane, john])
end
end
context 'with a list of attribute names' do
subject { relation.sort_by([:name]).to_a }
it_behaves_like 'sorted relation'
end
context 'with a block' do
subject { relation.sort_by { |r| [r.name] }.to_a }
it_behaves_like 'sorted relation'
end
end
|
dirkmueller/Rocket.Chat | client/components/pageNotFound/PageNotFound.js | import { Button, ButtonGroup } from '@rocket.chat/fuselage';
import React from 'react';
import { useTranslation } from '../providers/TranslationProvider';
import { useWipeInitialPageLoading } from '../../hooks/useWipeInitialPageLoading';
import { ConnectionStatusAlert } from '../connectionStatus/ConnectionStatusAlert';
import { useRoute } from '../providers/RouterProvider';
import './PageNotFound.css';
export function PageNotFound() {
useWipeInitialPageLoading();
const t = useTranslation();
const goToHome = useRoute('home');
const handleGoToPreviousPageClick = () => {
window.history.back();
};
const handleGoHomeClick = () => {
goToHome();
};
return <>
<ConnectionStatusAlert />
<section className='PageNotFound'>
<span className='PageNotFound__404'>404</span>
<span className='PageNotFound__message'>{t('Oops_page_not_found')}</span>
<span className='PageNotFound__description'>{t('Sorry_page_you_requested_does_not_exist_or_was_deleted')}</span>
<div className='PageNotFound__actions'>
<ButtonGroup>
<Button type='button' primary onClick={handleGoToPreviousPageClick}>{t('Return_to_previous_page')}</Button>
<Button type='button' primary onClick={handleGoHomeClick}>{t('Return_to_home')}</Button>
</ButtonGroup>
</div>
</section>
</>;
}
|
builder08/enigma2-plugins | weatherplugin/src/plugin.py | <reponame>builder08/enigma2-plugins
# -*- coding: utf-8 -*-
#
# WeatherPlugin E2
#
# Coded by Dr.Best (c) 2012
# Support: www.dreambox-tools.info
# E-Mail: <EMAIL>
#
# This plugin is open source but it is NOT free software.
#
# This plugin may only be distributed to and executed on hardware which
# is licensed by Dream Multimedia GmbH.
# In other words:
# It's NOT allowed to distribute any parts of this plugin or its source code in ANY way
# to hardware which is NOT licensed by Dream Multimedia GmbH.
# It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way
# on hardware which is NOT licensed by Dream Multimedia GmbH.
#
# If you want to use or modify the code or parts of it,
# you have to keep MY license and inform me about the modifications by mail.
#
# for localized messages
from __future__ import absolute_import
from . import _
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Components.ActionMap import ActionMap
from Components.Sources.StaticText import StaticText
from Components.Pixmap import Pixmap
from enigma import ePicLoad, eRect, eSize, gPixmapPtr
from Components.AVSwitch import AVSwitch
from Components.config import ConfigSubsection, ConfigSubList, ConfigInteger, config
from .setup import initConfig, MSNWeatherPluginEntriesListConfigScreen
from .MSNWeather import MSNWeather
import time
try:
from Components.WeatherMSN import weathermsn
WeatherMSNComp = weathermsn
except:
WeatherMSNComp = None
config.plugins.WeatherPlugin = ConfigSubsection()
config.plugins.WeatherPlugin.entrycount = ConfigInteger(0)
config.plugins.WeatherPlugin.Entry = ConfigSubList()
initConfig()
def main(session,**kwargs):
session.open(MSNWeatherPlugin)
def Plugins(**kwargs):
list = [PluginDescriptor(name=_("Weather Plugin"), description=_("Show Weather Forecast"), where = [PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU], icon = "weather.png", fnc=main)]
return list
class MSNWeatherPlugin(Screen):
skin = """
<screen name="MSNWeatherPlugin" position="center,center" size="664,340" title="Weather Plugin">
<widget render="Label" source="caption" position="10,20" zPosition="1" size="600,28" font="Regular;24" transparent="1"/>
<widget render="Label" source="observationtime" position="374,45" zPosition="1" size="280,20" font="Regular;14" transparent="1" halign="right" />
<widget render="Label" source="observationpoint" position="204,65" zPosition="1" size="450,40" font="Regular;14" transparent="1" halign="right" />
<widget name="currenticon" position="10,95" zPosition="1" size="55,45" alphatest="blend"/>
<widget render="Label" source="currentTemp" position="90,95" zPosition="1" size="100,23" font="Regular;22" transparent="1"/>
<widget render="Label" source="feelsliketemp" position="90,120" zPosition="1" size="155,40" font="Regular;14" transparent="1"/>
<widget render="Label" source="condition" position="270,95" zPosition="1" size="300,20" font="Regular;18" transparent="1"/>
<widget render="Label" source="wind_condition" position="270,115" zPosition="1" size="300,20" font="Regular;18" transparent="1"/>
<widget render="Label" source="humidity" position="270,135" zPosition="1" size="300,20" font="Regular;18" valign="bottom" transparent="1"/>
<widget render="Label" source="weekday1" position="35,170" zPosition="1" size="105,40" halign="center" valign="center" font="Regular;18" transparent="1"/>
<widget name="weekday1_icon" position="60,215" zPosition="1" size="55,45" alphatest="blend"/>
<widget render="Label" source="weekday1_temp" position="35,270" zPosition="1" size="105,60" halign="center" valign="bottom" font="Regular;16" transparent="1"/>
<widget render="Label" source="weekday2" position="155,170" zPosition="1" size="105,40" halign="center" valign="center" font="Regular;18" transparent="1"/>
<widget name="weekday2_icon" position="180,215" zPosition="1" size="55,45" alphatest="blend"/>
<widget render="Label" source="weekday2_temp" position="155,270" zPosition="1" size="105,60" halign="center" valign="bottom" font="Regular;16" transparent="1"/>
<widget render="Label" source="weekday3" position="275,170" zPosition="1" size="105,40" halign="center" valign="center" font="Regular;18" transparent="1"/>
<widget name="weekday3_icon" position="300,215" zPosition="1" size="55,45" alphatest="blend"/>
<widget render="Label" source="weekday3_temp" position="275,270" zPosition="1" size="105,60" halign="center" valign="bottom" font="Regular;16" transparent="1"/>
<widget render="Label" source="weekday4" position="395,170" zPosition="1" size="105,40" halign="center" valign="center" font="Regular;18" transparent="1"/>
<widget name="weekday4_icon" position="420,215" zPosition="1" size="55,45" alphatest="blend"/>
<widget render="Label" source="weekday4_temp" position="395,270" zPosition="1" size="105,60" halign="center" valign="bottom" font="Regular;16" transparent="1"/>
<widget render="Label" source="weekday5" position="515,170" zPosition="1" size="105,40" halign="center" valign="center" font="Regular;18" transparent="1"/>
<widget name="weekday5_icon" position="540,215" zPosition="1" size="55,45" alphatest="blend"/>
<widget render="Label" source="weekday5_temp" position="515,270" zPosition="1" size="105,60" halign="center" valign="bottom" font="Regular;16" transparent="1"/>
<widget render="Label" source="statustext" position="0,0" zPosition="1" size="664,340" font="Regular;20" halign="center" valign="center" transparent="1"/>
</screen>"""
def __init__(self, session):
    """Build the weather screen: key bindings, widgets and the initially
    selected location entry.

    session: enigma2 session, used later to open the config screen and
    the web browser.
    """
    Screen.__init__(self, session)
    self.title = _("Weather Plugin")
    # left/right cycle through the configured locations, menu opens the
    # location-list editor, info shows the MSN weather website.
    self["actions"] = ActionMap(["SetupActions", "DirectionActions"],
    {
        "cancel": self.close,
        "menu": self.config,
        "right": self.nextItem,
        "left": self.previousItem,
        "info": self.showWebsite
    }, -1)
    # Widgets of the "current conditions" panel.
    self["statustext"] = StaticText()
    self["currenticon"] = WeatherIcon()
    self["caption"] = StaticText()
    self["currentTemp"] = StaticText()
    self["condition"] = StaticText()
    self["wind_condition"] = StaticText()
    self["humidity"] = StaticText()
    self["observationtime"] = StaticText()
    self["observationpoint"] = StaticText()
    self["feelsliketemp"] = StaticText()
    # One label/icon/temperature triple per forecast day (1..5),
    # matching the weekdayN* widgets declared in the skin.
    i = 1
    while i <= 5:
        self["weekday%s" % i] = StaticText()
        self["weekday%s_icon" %i] = WeatherIcon()
        self["weekday%s_temp" % i] = StaticText()
        i += 1
    del i
    # Entry index is 1-based; -1 means "no location configured".
    self.weatherPluginEntryIndex = -1
    self.weatherPluginEntryCount = config.plugins.WeatherPlugin.entrycount.value
    if self.weatherPluginEntryCount >= 1:
        self.weatherPluginEntry = config.plugins.WeatherPlugin.Entry[0]
        self.weatherPluginEntryIndex = 1
    else:
        self.weatherPluginEntry = None
    self.webSite = ""
    self.weatherData = None
    # Start the first download once layout is done; cancel it on close.
    self.onLayoutFinish.append(self.startRun)
    self.onClose.append(self.__onClose)
def __onClose(self):
    """Cancel any in-flight weather download when the screen closes."""
    pending = self.weatherData
    if pending is None:
        return
    pending.cancel()
def startRun(self):
    """Kick off an asynchronous weather download for the current entry.

    Cancels a still-running download first; shows a hint in the status
    label when no location has been configured yet.
    """
    if self.weatherPluginEntry is not None:
        self["statustext"].text = _("Getting weather information...")
        # Drop a previous, still-running request before starting a new one.
        if self.weatherData is not None:
            self.weatherData.cancel()
            self.weatherData = None
        self.weatherData = MSNWeather()
        # getWeatherDataCallback receives the parsed result; showIcon is
        # invoked once per downloaded icon file.
        self.weatherData.getWeatherData(self.weatherPluginEntry.degreetype.value, self.weatherPluginEntry.weatherlocationcode.value, self.weatherPluginEntry.city.value, self.getWeatherDataCallback, self.showIcon)
    else:
        self["statustext"].text = _("No locations defined...\nPress 'Menu' to do that.")
def nextItem(self):
    """Advance to the next configured location, wrapping back to the first."""
    total = self.weatherPluginEntryCount
    if total == 0:
        return
    current = self.weatherPluginEntryIndex
    self.weatherPluginEntryIndex = current + 1 if current < total else 1
    self.setItem()
def previousItem(self):
    """Step back to the previous configured location, wrapping to the last."""
    total = self.weatherPluginEntryCount
    if total == 0:
        return
    current = self.weatherPluginEntryIndex
    self.weatherPluginEntryIndex = current - 1 if current >= 2 else total
    self.setItem()
def setItem(self):
    """Load the entry for the current 1-based index and refresh the screen."""
    entry_position = self.weatherPluginEntryIndex - 1
    self.weatherPluginEntry = config.plugins.WeatherPlugin.Entry[entry_position]
    self.clearFields()
    self.startRun()
def clearFields(self):
    """Blank every label, hide all icons and forget the website URL."""
    for name in ("caption", "currentTemp", "condition", "wind_condition",
                 "humidity", "observationtime", "observationpoint",
                 "feelsliketemp"):
        self[name].text = ""
    self["currenticon"].hide()
    self.webSite = ""
    for day in range(1, 6):
        self["weekday%s" % day].text = ""
        self["weekday%s_icon" % day].hide()
        self["weekday%s_temp" % day].text = ""
def showIcon(self, index, filename):
    """Display a downloaded icon file.

    index -1 targets the current-conditions icon; any other index targets
    the matching forecast-day icon.
    """
    target = self["currenticon"] if index == -1 else self["weekday%s_icon" % index]
    target.updateIcon(filename)
    target.show()
def getWeatherDataCallback(self, result, errortext):
    """Fill the screen with the downloaded weather data.

    result: MSNWeather status constant; errortext is shown on error.
    Item key "-1" holds current conditions, other keys are forecast-day
    indices matching the weekdayN widgets.
    """
    self["statustext"].text = ""
    if result == MSNWeather.ERROR:
        self.error(errortext)
    else:
        self["caption"].text = self.weatherData.city
        self.webSite = self.weatherData.url
        for weatherData in list(self.weatherData.weatherItems.items()):
            item = weatherData[1]
            if weatherData[0] == "-1": # current
                self["currentTemp"].text = "%s°%s" % (item.temperature, self.weatherData.degreetype)
                self["condition"].text = item.skytext
                self["humidity"].text = _("Humidity: %s %%") % item.humidity
                self["wind_condition"].text = item.winddisplay
                # Reformat HH:MM:SS observation time to HH:MM for display.
                c = time.strptime(item.observationtime, "%H:%M:%S")
                self["observationtime"].text = _("Observation time: %s") % time.strftime("%H:%M", c)
                self["observationpoint"].text = _("Observation point: %s") % item.observationpoint
                self["feelsliketemp"].text = _("Feels like %s") % item.feelslike + "°" + self.weatherData.degreetype
            else:
                index = weatherData[0]
                # Forecast date arrives as ISO (YYYY-MM-DD); show "DD. Mon".
                c = time.strptime(item.date, "%Y-%m-%d")
                self["weekday%s" % index].text = "%s\n%s" % (item.day, time.strftime("%d. %b", c))
                lowTemp = item.low
                highTemp = item.high
                self["weekday%s_temp" % index].text = "%s°%s|%s°%s\n%s" % (highTemp, self.weatherData.degreetype, lowTemp, self.weatherData.degreetype, item.skytextday)
        # Keep the (optional) converter component for the first location in sync.
        if self.weatherPluginEntryIndex == 1 and WeatherMSNComp is not None:
            WeatherMSNComp.updateWeather(self.weatherData, result, errortext)
def config(self):
    """Open the location-list editor; refresh the screen when it closes."""
    callback = self.setupFinished
    self.session.openWithCallback(callback, MSNWeatherPluginEntriesListConfigScreen)
def setupFinished(self, index, entry = None):
    """Callback for the config screen.

    index: 0-based index of the entry chosen in the editor.
    entry: the chosen entry object, or None if nothing was selected.
    """
    self.weatherPluginEntryCount = config.plugins.WeatherPlugin.entrycount.value
    if self.weatherPluginEntryCount >= 1:
        if entry is not None:
            self.weatherPluginEntry = entry
            # Convert the editor's 0-based index to our 1-based index.
            self.weatherPluginEntryIndex = index + 1
        # Entries still exist but nothing was selected: fall back to the first.
        if self.weatherPluginEntry is None:
            self.weatherPluginEntry = config.plugins.WeatherPlugin.Entry[0]
            self.weatherPluginEntryIndex = 1
    else:
        # All entries were deleted.
        self.weatherPluginEntry = None
        self.weatherPluginEntryIndex = -1
    self.clearFields()
    self.startRun()
def error(self, errortext):
    """Reset the whole screen and show errortext in the status label."""
    self.clearFields()
    status = self["statustext"]
    status.text = errortext
def showWebsite(self):
    """Open the MSN weather page in the Browser plugin, if available.

    The bare except is deliberate best-effort behaviour: a missing
    browser plugin (ImportError) or any failure while opening it is
    silently ignored.
    """
    try:
        from Plugins.Extensions.Browser.Browser import Browser
        if self.webSite:
            self.session.open(Browser, config.plugins.WebBrowser.fullscreen.value, self.webSite, False)
    except: pass # I dont care if browser is installed or not...
class WeatherIcon(Pixmap):
    """Pixmap widget that asynchronously decodes and scales a weather icon."""
    def __init__(self):
        Pixmap.__init__(self)
        # Last decoded file name; used to skip redundant decodes.
        self.IconFileName = ""
        self.picload = ePicLoad()
        # paintIconPixmapCB fires when an asynchronous decode completes.
        self.picload.PictureData.get().append(self.paintIconPixmapCB)
    def onShow(self):
        Pixmap.onShow(self)
        # Configure the decoder with the widget size and framebuffer
        # aspect ratio so decoded icons are scaled to fit.
        sc = AVSwitch().getFramebufferScale()
        self._aspectRatio = eSize(sc[0], sc[1])
        self._scaleSize = self.instance.size()
        self.picload.setPara((self._scaleSize.width(), self._scaleSize.height(), sc[0], sc[1], True, 2, '#ff000000'))
    def paintIconPixmapCB(self, picInfo=None):
        """Decode-finished callback: push the resulting pixmap into the widget."""
        ptr = self.picload.getData()
        if ptr is not None:
            pic_scale_size = eSize()
            # To be added in the future:
            if 'scale' in eSize.__dict__ and self._scaleSize.isValid() and self._aspectRatio.isValid():
                pic_scale_size = ptr.size().scale(self._scaleSize, self._aspectRatio)
            # To be removed in the future:
            elif 'scaleSize' in gPixmapPtr.__dict__:
                pic_scale_size = ptr.scaleSize()
            if pic_scale_size.isValid():
                pic_scale_width = pic_scale_size.width()
                pic_scale_height = pic_scale_size.height()
                dest_rect = eRect(0, 0, pic_scale_width, pic_scale_height)
                self.instance.setScale(1)
                self.instance.setScaleDest(dest_rect)
            else:
                # No usable scale information: show the pixmap unscaled.
                self.instance.setScale(0)
            self.instance.setPixmap(ptr)
        else:
            self.instance.setPixmap(None)
    def updateIcon(self, filename):
        """Start decoding filename unless it is already being shown."""
        new_IconFileName = filename
        if (self.IconFileName != new_IconFileName):
            self.IconFileName = new_IconFileName
            self.picload.startDecode(self.IconFileName)
|
jathurdev/DiscordBot | Commands/poll.js | <reponame>jathurdev/DiscordBot
exports.run = async (client,message,args,config,tools) => {
const Discord = require(`discord.js`)
if (!message.member.hasPermission('ADMINISTRATOR')) return message.channel.send('Must be Administrator to run this command')
if(!args[0]) return message.channel.send(`Usage : ${config.Bot.Prefix}poll <question>`)
let Embed = new Discord.RichEmbed()
.setColor(config.Bot.BotColor)
.setAuthor("Poll :")
.setDescription(args.join(` `))
.setFooter("React to vote")
message.channel.send(Embed).then(msg=> {
msg.react(`✅`).then(m=> {
msg.react(`❎`)
})
})
} |
szkkteam/agrosys | frontend/app/farmApp/operation/task/models/task.js | import { Schema, many, fk, attr } from 'redux-orm';
import { PropTypes } from 'react';
import taskEntity from '../reducers/taskEntity'
import Model from 'utils/Model'
/**
 * ORM model for a task row. A task belongs to a crop plan
 * (cropPlanId -> CropPlan, reverse accessor `tasks` on CropPlan).
 */
export class Task extends Model {
    static get fields() {
        return {
            id: attr(),
            title: attr(),
            //cropPlan: fk('CropPlan', 'tasks'),
            cropPlanId: fk({
                to: 'CropPlan',
                as: 'cropPlan',
                relatedName: 'tasks',
            }),
        }
    }

    // Delegate every action to the task entity reducer.
    static reducer(action, Task, session) {
        return taskEntity(action, Task, session)
    }

    /**
     * Upsert raw API data into the session.
     * NOTE: relation parsing is still disabled (see the commented-out
     * cropPlan handling); the unused `this.session` destructuring that
     * only served it has been removed.
     */
    static parse(data) {
        //const { CropPlan } = this.session
        const clonedData = {
            ...data,
            //cropPlan: CropPlan.parse({...data.cropPlan}),
        }
        // TODO: Do some parsing magic with relations
        return this.upsert(clonedData)
    }

    // Plain-object snapshot of this instance's backing record.
    toJSON() {
        return {
            ...this.ref,
            //cropPlan: this.cropPlan.toJSON(),
        }
    }
}
Task.modelName = "Task";
gajgeospatial/VTK-9.0.1 | msvc/Rendering/OpenGL2/vtkPolyDataFS.cxx | #include "vtkPolyDataFS.h"
// Auto-generated embedding of vtkPolyDataFS.glsl: the fragment-shader
// template is stored as a single C string. The //VTK::*::Dec and
// //VTK::*::Impl markers are substituted at runtime by the OpenGL2
// mappers — do not edit this string by hand.
const char *vtkPolyDataFS =
"//VTK::System::Dec\n"
"\n"
"/*=========================================================================\n"
"\n"
"  Program:   Visualization Toolkit\n"
"  Module:    vtkPolyDataFS.glsl\n"
"\n"
"  Copyright (c) <NAME>, <NAME>, <NAME>\n"
"  All rights reserved.\n"
"  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.\n"
"\n"
"     This software is distributed WITHOUT ANY WARRANTY; without even\n"
"     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR\n"
"     PURPOSE.  See the above copyright notice for more information.\n"
"\n"
"=========================================================================*/\n"
"// Template for the polydata mappers fragment shader\n"
"\n"
"uniform int PrimitiveIDOffset;\n"
"\n"
"//VTK::CustomUniforms::Dec\n"
"\n"
"// VC position of this fragment\n"
"//VTK::PositionVC::Dec\n"
"\n"
"// Camera prop\n"
"//VTK::Camera::Dec\n"
"\n"
"// optional color passed in from the vertex shader, vertexColor\n"
"//VTK::Color::Dec\n"
"\n"
"// optional surface normal declaration\n"
"//VTK::Normal::Dec\n"
"\n"
"// extra lighting parameters\n"
"//VTK::Light::Dec\n"
"\n"
"// Texture maps\n"
"//VTK::TMap::Dec\n"
"\n"
"// Texture coordinates\n"
"//VTK::TCoord::Dec\n"
"\n"
"// picking support\n"
"//VTK::Picking::Dec\n"
"\n"
"// Depth Peeling Support\n"
"//VTK::DepthPeeling::Dec\n"
"\n"
"// clipping plane vars\n"
"//VTK::Clip::Dec\n"
"\n"
"// the output of this shader\n"
"//VTK::Output::Dec\n"
"\n"
"// Apple Bug\n"
"//VTK::PrimID::Dec\n"
"\n"
"// handle coincident offsets\n"
"//VTK::Coincident::Dec\n"
"\n"
"// Value raster\n"
"//VTK::ValuePass::Dec\n"
"\n"
"void main()\n"
"{\n"
"  // VC position of this fragment. This should not branch/return/discard.\n"
"  //VTK::PositionVC::Impl\n"
"\n"
"  // Place any calls that require uniform flow (e.g. dFdx) here.\n"
"  //VTK::UniformFlow::Impl\n"
"\n"
"  // Set gl_FragDepth here (gl_FragCoord.z by default)\n"
"  //VTK::Depth::Impl\n"
"\n"
"  // Early depth peeling abort:\n"
"  //VTK::DepthPeeling::PreColor\n"
"\n"
"  // Apple Bug\n"
"  //VTK::PrimID::Impl\n"
"\n"
"  //VTK::Clip::Impl\n"
"\n"
"  //VTK::ValuePass::Impl\n"
"\n"
"  //VTK::Color::Impl\n"
"\n"
"  // Generate the normal if we are not passed in one\n"
"  //VTK::Normal::Impl\n"
"\n"
"  //VTK::Light::Impl\n"
"\n"
"  //VTK::TCoord::Impl\n"
"\n"
"  if (gl_FragData[0].a <= 0.0)\n"
"    {\n"
"    discard;\n"
"    }\n"
"\n"
"  //VTK::DepthPeeling::Impl\n"
"\n"
"  //VTK::Picking::Impl\n"
"\n"
"  // handle coincident offsets\n"
"  //VTK::Coincident::Impl\n"
"}\n"
"";
|
matsubara0507/semantic | test/fixtures/java/corpus/While.B.java | <reponame>matsubara0507/semantic
class WhileDemo {
    void main(String[] args){
        String theString = "cheese";
        // NOTE(review): `!=` compares String references, not contents, and
        // theString is never reassigned, so this loop never terminates.
        // Presumably intentional — this file is a parser-test corpus fixture.
        while (theString != "poop") {
            System.out.println("Count is: " + theString);
        }
    }
}
|
0zzyx/bcnode-unpacked-8.1 | src/rover/manager.es6 | <filename>src/rover/manager.es6
/**
* Copyright (c) 2017-present, blockcollider.org developers, All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
const { fork } = require('child_process')
const { glob } = require('glob')
const fs = require('fs')
const path = require('path')
const { flatten, groupBy } = require('ramda')
const debug = require('debug')('bcnode:rover:manager')
const logging = require('../logger')
const { errToString } = require('../helper/error')
const { Block } = require('../protos/core_pb')
const { RpcClient } = require('../rpc')
const ROVER_RESTART_TIMEOUT = 15000
const ROVED_DATA_PATH = path.resolve(__dirname, '..', '..', '_debug')
export const ROVER_DF_VOID_EXIT_CODE = 16
/**
* Rover lookup table
*
* Gets the rover path by name of it
*/
// Maps a rover name to the absolute path of its entry script,
// which is forked as a child process by RoverManager.startRover.
export const rovers = {
  btc: path.resolve(__dirname, 'btc', 'rover.js'), // Bitcoin
  eth: path.resolve(__dirname, 'eth', 'rover.js'), // Ethereum
  lsk: path.resolve(__dirname, 'lsk', 'rover.js'), // Lisk
  neo: path.resolve(__dirname, 'neo', 'rover.js'), // NEO
  wav: path.resolve(__dirname, 'wav', 'rover.js') // Waves
}
/**
* Rover manager
*/
export class RoverManager {
  _logger: Object // eslint-disable-line no-undef
  _rovers: Object // eslint-disable-line no-undef
  _timeouts: Object // eslint-disable-line no-undef

  constructor () {
    this._logger = logging.getLogger(__filename)
    this._rovers = {} // roverName -> forked ChildProcess
    this._timeouts = {} // roverName -> cycle timer (currently unused, see commented code)
  }

  get rovers (): Object {
    return this._rovers
  }

  /**
   * Start rover
   * @param roverName Name of rover to start
   * @returns {boolean} result
   */
  startRover (roverName: string) {
    const roverPath = rovers[roverName]
    if (!roverPath) {
      this._logger.error(`rover is not implemented '${roverName}'`)
      return false
    }
    this._logger.info(`starting rover '${roverName}' using '${roverPath}'`)
    // const cycleInterval = Math.floor(Math.random() * 50000)
    // const roverRefreshTimeout = (1000 * 10) + cycleInterval
    const rover = fork(
      roverPath,
      [],
      {
        execArgv: []
      }
    )
    this._logger.info(`rover started '${roverName}'`)
    this._rovers[roverName] = rover
    // this._timeouts[roverName] = setTimeout(() => {
    //   this._logger.info('cycling rover ' + roverName)
    //   return this._killRover(roverName)
    // }, roverRefreshTimeout)
    // Auto-restart the rover unless it exited with the "void" code,
    // which signals a deliberate, permanent shutdown.
    rover.on('exit', (code, signal) => {
      this._logger.warn(`rover ${roverName} exited (code: ${code}, signal: ${signal}) - restarting in ${ROVER_RESTART_TIMEOUT / 1000}s`)
      delete this._rovers[roverName]
      // TODO ROVER_RESTART_TIMEOUT should not be static 5s but probably some exponential backoff series separate for each rover
      if (code !== ROVER_DF_VOID_EXIT_CODE) {
        setTimeout(() => {
          this.startRover(roverName)
        }, ROVER_RESTART_TIMEOUT)
      }
    })
    return true
  }

  /**
   * Kill all rovers managed by this manager
   * @return {*} Promise
   */
  killRovers (): Promise<bool> {
    const roverNames = Object.keys(this._rovers)
    roverNames.map((roverName) => {
      this._killRover(roverName)
    })
    return Promise.resolve(true)
  }

  // Re-submits the most recent roved block of each blockchain (found in the
  // on-disk debug dumps) to the collector over RPC.
  replay () {
    debug('Replaying roved blocks')
    const pattern = path.join(ROVED_DATA_PATH, '**/unified/*.json')
    let files: Array<string> = glob.sync(pattern)
    // Group dump files by blockchain (4th-from-last path segment).
    const groups = groupBy((p) => {
      const parts = p.split(path.sep)
      return parts[parts.length - 4]
    })(files)
    // Keep only the newest dump per blockchain.
    const tmp: Array<any> = Object.keys(groups)
      .map((k: string) => {
        return groups[k].slice(-1)
      }) || []
    // $FlowFixMe
    files = flatten(tmp)
      .sort((a, b) => {
        const fnameA = path.posix.basename(a)
        const fnameB = path.posix.basename(b)
        if (fnameA < fnameB) {
          return -1
        } else if (fnameA > fnameB) {
          return 1
        }
        return 0
      })
    const rpc = new RpcClient()
    files.forEach((f) => {
      const json = fs.readFileSync(f).toString()
      const obj = JSON.parse(json)
      // Rehydrate the dump into a protobuf Block before submitting.
      const block = new Block()
      block.setBlockchain(obj.blockchain)
      block.setHash(obj.hash)
      block.setPreviousHash(obj.previousHash)
      block.setTimestamp(obj.timestamp)
      block.setHeight(obj.height)
      block.setMerkleRoot(obj.merkleRoot)
      debug(`Replaying roved block`, f, obj)
      rpc.rover.collectBlock(block, (err) => {
        if (err) {
          debug(`Unable to collect block ${f}`, err)
        } else {
          debug('recieved block from ' + obj.blockchain)
        }
      })
    })
  }

  /**
   * Kill rover managed by this manager by its name
   * @param roverName
   * @private
   */
  _killRover (roverName: string) {
    // NOTE(review): assumes the rover is currently tracked — the destructure
    // throws if this._rovers[roverName] is undefined; confirm callers always
    // pass a known name (killRovers iterates existing keys, so it is safe).
    const { pid } = this._rovers[roverName]
    this._logger.info(`Killing rover '${roverName}', PID: ${pid}`)
    try {
      process.kill(pid, 'SIGHUP')
    } catch (err) {
      this._logger.warn(`Error while killing rover '${roverName}', PID: ${pid}, error: ${errToString(err)}`)
    }
  }
}
export default RoverManager
|
sempmessaging/semp | sempd-core/src/main/java/org/sempmessaging/sempd/core/serverkeys/VerificationKeysFactory.java | <filename>sempd-core/src/main/java/org/sempmessaging/sempd/core/serverkeys/VerificationKeysFactory.java
package org.sempmessaging.sempd.core.serverkeys;
import org.sempmessaging.libsemp.arguments.Args;
import org.sempmessaging.libsemp.key.PublicVerificationKey;
import java.util.List;
/**
 * Factory for {@link PublicVerificationKeys} containers.
 */
public class VerificationKeysFactory {
    /**
     * Wraps the given keys in a new {@link PublicVerificationKeys} container.
     *
     * @param keysList the verification keys; must not be {@code null}
     * @return a container holding the supplied keys
     */
    public PublicVerificationKeys newVerificationKeys(final List<PublicVerificationKey> keysList) {
        Args.notNull(keysList, "keysList");
        final PublicVerificationKeys keys = new PublicVerificationKeys(keysList);
        return keys;
    }
}
|
BananaNo1/leetcode | src/simple/s501.java | package simple;public class s501 {
    // TODO: placeholder for LeetCode 501 (Find Mode in BST) — no solution yet.
}
|
Felorati/Thesis | code/Tests/Multiverse/multiverse-core/src/test/java/org/multiverse/stms/gamma/transactionalobjects/txnlong/GammaTxnLong_loadTest.java | <reponame>Felorati/Thesis
package org.multiverse.stms.gamma.transactionalobjects.txnlong;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.multiverse.api.LockMode;
import org.multiverse.stms.gamma.GammaConstants;
import org.multiverse.stms.gamma.GammaStm;
import org.multiverse.stms.gamma.GammaTestUtils;
import org.multiverse.stms.gamma.transactionalobjects.GammaTxnLong;
import org.multiverse.stms.gamma.transactionalobjects.Tranlocal;
import org.multiverse.stms.gamma.transactions.GammaTxn;
import java.util.Collection;
import static java.util.Arrays.asList;
import static org.junit.Assert.*;
import static org.multiverse.stms.gamma.GammaTestUtils.*;
/**
 * Parameterized tests for loading a {@code GammaTxnLong} into a tranlocal.
 *
 * Every (readBiased, arriveNeeded) combination from {@link #configs()} is run
 * against each lock mode (None/Read/Write/Exclusive) requested by the loading
 * transaction while another transaction holds each possible lock. Method
 * names follow the pattern
 * {@code locking_when<HeldLock>LockedByOtherAnd<RequestedLock>Needed}.
 */
@RunWith(Parameterized.class)
public class GammaTxnLong_loadTest implements GammaConstants {
    private GammaStm stm;
    // Whether the ref under test is converted to read-biased before loading.
    private boolean readBiased;
    // Whether load() is asked to register an arrive on the ref.
    private boolean arriveNeeded;

    public GammaTxnLong_loadTest(boolean readBiased, boolean arriveNeeded) {
        this.readBiased = readBiased;
        this.arriveNeeded = arriveNeeded;
    }

    @Before
    public void setUp() {
        stm = new GammaStm();
    }

    // All four combinations of (readBiased, arriveNeeded).
    @Parameterized.Parameters
    public static Collection<Boolean[]> configs() {
        return asList(
                new Boolean[]{false, false},
                new Boolean[]{false, true},
                new Boolean[]{true, false},
                new Boolean[]{true, true}
        );
    }

    // Creates the ref under test, making it read-biased when the current
    // parameterization requires it.
    public GammaTxnLong newTxnLong(long initialValue) {
        GammaTxnLong ref = new GammaTxnLong(stm, initialValue);
        if (readBiased) {
            ref = GammaTestUtils.makeReadBiased(ref);
        }
        return ref;
    }

    // ====================== locking ==========================

    @Test
    public void locking_whenNotLockedByOtherAndNoLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.None);

        GammaTxn tx = stm.newDefaultTxn();
        Tranlocal tranlocal = new Tranlocal();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_NONE, 1, arriveNeeded);

        assertTrue(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertSame(ref, tranlocal.owner);
        assertEquals(initialValue, tranlocal.long_value);
        assertEquals(initialValue, tranlocal.long_oldValue);
        assertEquals(initialVersion, tranlocal.version);
        assertEquals(arriveNeeded && !readBiased, tranlocal.hasDepartObligation());
        assertLockMode(ref, LockMode.None);
        assertSurplus(ref, arriveNeeded ? 1 : 0);
        assertReadonlyCount(ref, 0);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenNotLockedByOtherAndReadLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.None);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_READ, 1, arriveNeeded);

        assertTrue(result);
        assertEquals(LOCKMODE_READ, tranlocal.getLockMode());
        assertSame(ref, tranlocal.owner);
        assertEquals(initialValue, tranlocal.long_value);
        assertEquals(initialValue, tranlocal.long_oldValue);
        assertEquals(initialVersion, tranlocal.version);
        assertEquals(!readBiased, tranlocal.hasDepartObligation());
        assertLockMode(ref, LockMode.Read);
        assertReadLockCount(ref, 1);
        assertSurplus(ref, 1);
        assertReadonlyCount(ref, 0);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenNotLockedByOtherAndWriteLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.None);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_WRITE, 1, arriveNeeded);

        assertTrue(result);
        assertEquals(LOCKMODE_WRITE, tranlocal.getLockMode());
        assertSame(ref, tranlocal.owner);
        assertEquals(initialValue, tranlocal.long_value);
        assertEquals(initialValue, tranlocal.long_oldValue);
        assertEquals(initialVersion, tranlocal.version);
        assertEquals(!readBiased, tranlocal.hasDepartObligation());
        assertLockMode(ref, LockMode.Write);
        assertSurplus(ref, 1);
        assertReadonlyCount(ref, 0);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenNotLockedByOtherAndExclusiveLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.None);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_EXCLUSIVE, 1, arriveNeeded);

        assertTrue(result);
        assertEquals(LOCKMODE_EXCLUSIVE, tranlocal.getLockMode());
        assertSame(ref, tranlocal.owner);
        assertEquals(initialValue, tranlocal.long_value);
        assertEquals(initialValue, tranlocal.long_oldValue);
        assertEquals(initialVersion, tranlocal.version);
        assertEquals(!readBiased, tranlocal.hasDepartObligation());
        assertLockMode(ref, LockMode.Exclusive);
        assertSurplus(ref, 1);
        assertReadonlyCount(ref, 0);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenReadLockedByOtherAndNoLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Read);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_NONE, 1, arriveNeeded);

        assertTrue(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertSame(ref, tranlocal.owner);
        assertEquals(initialValue, tranlocal.long_value);
        assertEquals(initialValue, tranlocal.long_oldValue);
        assertEquals(initialVersion, tranlocal.version);
        assertEquals(!readBiased && arriveNeeded, tranlocal.hasDepartObligation());
        assertLockMode(ref, LockMode.Read);
        assertSurplus(ref, !readBiased && arriveNeeded ? 2 : 1);
        assertReadLockCount(ref, 1);
        assertReadonlyCount(ref, 0);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenReadLockedByOtherAndReadLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Read);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_READ, 1, arriveNeeded);

        assertTrue(result);
        assertEquals(LOCKMODE_READ, tranlocal.getLockMode());
        assertSame(ref, tranlocal.owner);
        assertEquals(initialValue, tranlocal.long_value);
        assertEquals(initialValue, tranlocal.long_oldValue);
        assertEquals(initialVersion, tranlocal.version);
        assertEquals(!readBiased, tranlocal.hasDepartObligation());
        assertLockMode(ref, LockMode.Read);
        assertSurplus(ref, readBiased ? 1 : 2);
        assertReadLockCount(ref, 2);
        assertReadonlyCount(ref, 0);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    // Read lock held by another transaction is incompatible with a
    // write/exclusive request: the load must fail and leave the ref untouched.
    @Test
    public void locking_whenReadLockedByOtherAndWriteLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Read);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_WRITE, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Read);
        assertSurplus(ref, 1);
        assertReadLockCount(ref, 1);
        assertReadonlyCount(ref, 0);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenReadLockedByOtherAndExclusiveLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Read);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_EXCLUSIVE, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Read);
        assertSurplus(ref, 1);
        assertReadLockCount(ref, 1);
        assertReadonlyCount(ref, 0);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    // A write lock held by another transaction still allows a plain
    // (lock-free) read...
    @Test
    public void locking_whenWriteLockedByOtherAndNoLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Write);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_NONE, 1, arriveNeeded);

        assertTrue(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertSame(ref, tranlocal.owner);
        assertEquals(initialValue, tranlocal.long_value);
        assertEquals(initialValue, tranlocal.long_oldValue);
        assertEquals(initialVersion, tranlocal.version);
        assertEquals(arriveNeeded && !readBiased, tranlocal.hasDepartObligation());
        assertLockMode(ref, LockMode.Write);
        assertSurplus(ref, arriveNeeded && !readBiased ? 2 : 1);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    // ...but conflicts with any lock request. (Method name typo "Writet"
    // preserved from the original.)
    @Test
    public void locking_whenWritetLockedByOtherAndReadLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Write);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_READ, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Write);
        assertSurplus(ref, 1);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenWriteLockedByOtherAndWriteLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Write);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_WRITE, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Write);
        assertSurplus(ref, 1);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenWriteLockedByOtherAndExclusiveLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Write);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_EXCLUSIVE, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Write);
        assertSurplus(ref, 1);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    // An exclusive lock held by another transaction blocks every load,
    // even a plain read.
    @Test
    public void locking_whenExclusiveLockedByOtherAndNoLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Exclusive);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx, tranlocal, LOCKMODE_NONE, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Exclusive);
        assertSurplus(ref, 1);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenExclusiveLockedByOtherAndReadLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Exclusive);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_READ, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Exclusive);
        assertSurplus(ref, 1);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenExclusiveLockedByOtherAndWriteLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Exclusive);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_WRITE, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Exclusive);
        assertSurplus(ref, 1);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }

    @Test
    public void locking_whenExclusiveLockedByOtherAndExclusiveLockNeeded() {
        long initialValue = 10;
        GammaTxnLong ref = newTxnLong(initialValue);
        long initialVersion = ref.getVersion();

        GammaTxn otherTx = stm.newDefaultTxn();
        ref.getLock().acquire(otherTx, LockMode.Exclusive);

        Tranlocal tranlocal = new Tranlocal();
        GammaTxn tx = stm.newDefaultTxn();
        boolean result = ref.load(tx,tranlocal, LOCKMODE_EXCLUSIVE, 1, arriveNeeded);

        assertFalse(result);
        assertEquals(LOCKMODE_NONE, tranlocal.getLockMode());
        assertNull(tranlocal.owner);
        assertLockMode(ref, LockMode.Exclusive);
        assertSurplus(ref, 1);
        assertReadBiased(ref, readBiased);
        assertVersionAndValue(ref, initialVersion, initialValue);
    }
}
|
pikinder/nn-patterns | nn_patterns/explainer/gradient_based.py | # Begin: Python 2/3 compatibility header small
# Get Python 3 functionality:
from __future__ import\
absolute_import, print_function, division, unicode_literals
from future.utils import raise_with_traceback, raise_from
# catch exception with: except Exception as e
from builtins import range, map, zip, filter
from io import open
import six
# End: Python 2/3 compatability header small
import lasagne.layers as L
import lasagne.nonlinearities
import numpy as np
import theano
import theano.tensor as T
from .base import BaseRelevanceExplainer
from .base import BaseInvertExplainer
from ..utils import misc as umisc
__all__ = [
"GradientExplainer",
"BaseDeConvNetExplainer",
"DeConvNetExplainer",
"GuidedBackpropExplainer",
"AlternativeGradientExplainer",
]
class AlternativeGradientExplainer(BaseRelevanceExplainer):
    """
    Explainer that uses automatic differentiation
    to get the gradient of the input with respect to the output.
    """

    def _init_explain_function(self, patterns=None, **kwargs):
        # Strip trailing sigmoids so the gradient is taken with respect to
        # the pre-sigmoid scores; `patterns` is unused for plain gradients.
        with umisc.ignore_sigmoids(self.output_layer) as output_layer:
            Y = L.get_output(output_layer, deterministic=True)
        X = self.input_layer.input_var # original
        I = T.iscalar() # Output neuron
        S = T.iscalar() # Sample that is desired
        # Symbolic gradient of a single scalar output Y[S, I] w.r.t. the input.
        E = T.grad(Y[S].flatten()[I], X)
        self.grad_function = theano.function(inputs=[X, S, I], outputs=E)

    def explain(self, X, target="max_output", **kwargs):
        """Weighted sum of per-output-neuron input gradients.

        Each output neuron's gradient is scaled by its relevance value for
        the given target and accumulated into the explanation array.
        """
        explanation = np.zeros_like(X)
        relevance_values = self._get_relevance_values(X, target=target)
        for i in range(relevance_values.shape[0]):
            for j in range(relevance_values.shape[1]):
                if relevance_values[i, j] != 0:
                    # Todo: Why do we do for each sample?
                    explanation[i:i+1] += (self.grad_function(X[i:i+1],
                                                              0, j) *
                                           relevance_values[i, j])
        return explanation

    def get_name(self):
        return "Gradient"
class BaseDeConvNetExplainer(BaseInvertExplainer):
    """Shared parameter wiring for DeConvNet-style explainers."""

    def _set_inverse_parameters(self, patterns=None):
        # Copy each forward layer's weights into its inverse counterpart:
        # conv layers get transposed (and possibly un-flipped) filters,
        # dense layers get the plain transpose.
        for layer in L.get_all_layers(self.output_layer):
            layer_cls = type(layer)
            if layer_cls is L.Conv2DLayer:
                weights = layer.W.get_value()
                if layer.flip_filters:
                    weights = weights[:, :, ::-1, ::-1]
                self.inverse_map[layer].W.set_value(
                    weights.transpose(1, 0, 2, 3))
            elif layer_cls is L.DenseLayer:
                self.inverse_map[layer].W.set_value(layer.W.get_value().T)

    def _put_rectifiers(self, input_layer, layer):
        # Subclasses decide how rectifiers behave on the backward pass.
        raise RuntimeError("Needs to be implemented by the subclass.")
class DeConvNetExplainer(BaseDeConvNetExplainer):
    """DeConvNet explainer: rectifies the backward signal itself."""

    def _invert_LocalResponseNormalisation2DLayer(self, layer, feeder):
        # Local response normalisation is treated as identity when inverting.
        return feeder

    def _put_rectifiers(self, input_layer, layer):
        rectified = umisc.get_rectifier_layer(input_layer, layer)
        return rectified

    def get_name(self):
        return "DeConvNet"
class GuidedBackpropExplainer(BaseDeConvNetExplainer):
    """Guided backprop: combines DeConvNet rectification with the
    forward-pass activation mask."""

    def _invert_LocalResponseNormalisation2DLayer(self, layer, feeder):
        # Local response normalisation is treated as identity when inverting.
        return feeder

    def _put_rectifiers(self, input_layer, layer):
        # First rectify the backward signal, then gate it with a copy of the
        # forward rectifier's activation pattern.
        rectified = umisc.get_rectifier_layer(input_layer, layer)
        return umisc.get_rectifier_copy_layer(rectified, layer)

    def get_name(self):
        return "Guided BackProp"
class GradientExplainer(BaseDeConvNetExplainer):
    """Gradient computed via the inversion framework (instead of T.grad)."""

    def _invert_LocalResponseNormalisation2DLayer(self, layer, feeder):
        # For a true gradient, LRN must actually be inverted.
        return L.InverseLayer(feeder, layer)

    def _put_rectifiers(self, input_layer, layer):
        # Gradient uses only the forward activation mask (no rectification of
        # the backward signal).
        return umisc.get_rectifier_copy_layer(input_layer, layer)

    def get_name(self):
        return "alternative Gradient"
|
AlaEddinBouker/medic | public/js/signup.js | (window.webpackJsonp = window.webpackJsonp || []).push([[25], {
341: function (e, r, n) {
"use strict";
n.r(r);
var o = n(0), t = n.n(o);
n(6), n(12);
!function () {
function errorPlacementInput(e, r) {
r.parent().is(".mda-form-control") ? e.insertAfter(r.parent()) : r.is(":radio") || r.is(":checkbox") ? e.insertAfter(r.parent()) : e.insertAfter(r)
}
t()(function () {
t()("#user-signup").validate({
errorPlacement: errorPlacementInput,
rules: {
accountName: {required: !0, email: !0},
accountPassword: {required: !0},
accountPasswordCheck: {required: !0, equalTo: "#account-password"}
},
submitHandler: function () {
console.log("Form submitted!"), t()("#form-ok").hide().removeClass("invisible").show(500)
}
})
})
}()
}
}, [[341, 0]]]); |
jehovahsays/MMOSOCKETHTML5JSCHAT | examples/chat/node_modules/parsoid/lib/utils/jsutils.js | <filename>examples/chat/node_modules/parsoid/lib/utils/jsutils.js
/*
* This file contains Parsoid-independent JS helper functions.
* Over time, more functions can be migrated out of various other files here.
*/
'use strict';
require('../../core-upgrade.js');
var Promise = require('./promise.js');
// Shared guard installed over the mutating methods of frozen collections.
var rejectMutation = function() {
	throw new TypeError("Mutation attempted on read-only collection.");
};

// Return the last element of an array (undefined for an empty array).
var lastItem = function(array) {
	console.assert(Array.isArray(array));
	return array[array.length - 1];
};

var JSUtils = {

	lastItem: lastItem,

	// Convert a plain object into a Map keyed by its own enumerable entries.
	mapObject: function(obj) {
		return new Map(Object.entries(obj));
	},

	// ES6 maps/sets are still writable even when frozen, because they
	// store data inside the object linked from an internal slot.
	// This freezes a map by disabling the mutation methods, although
	// its not bulletproof: you could use `Map.prototype.set.call(m, ...)`
	// to still mutate the backing store.
	freezeMap: function(it, freezeEntries) {
		// Allow `it` to be an iterable, as well as a map.
		if (!(it instanceof Map)) { it = new Map(it); }
		it.set = it.clear = it['delete'] = rejectMutation;
		Object.freeze(it);
		if (freezeEntries) {
			it.forEach(function(v, k) {
				JSUtils.deepFreeze(v);
				JSUtils.deepFreeze(k);
			});
		}
		return it;
	},

	// This makes a set read-only.
	freezeSet: function(it, freezeEntries) {
		// Allow `it` to be an iterable, as well as a set.
		if (!(it instanceof Set)) { it = new Set(it); }
		it.add = it.clear = it['delete'] = rejectMutation;
		Object.freeze(it);
		if (freezeEntries) {
			it.forEach(function(v) {
				JSUtils.deepFreeze(v);
			});
		}
		return it;
	},

	// Deep-freeze an object
	// See https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Global_Objects/Object/freeze
	deepFreeze: function(o) {
		if (!(o instanceof Object)) {
			return o;
		} else if (Object.isFrozen(o)) {
			// Note that this might leave an unfrozen reference somewhere in
			// the object if there is an already frozen object containing an
			// unfrozen object.
			return o;
		} else if (o instanceof Map) {
			return JSUtils.freezeMap(o, true);
		} else if (o instanceof Set) {
			return JSUtils.freezeSet(o, true);
		}

		Object.freeze(o);
		for (var propKey in o) {
			var desc = Object.getOwnPropertyDescriptor(o, propKey);
			if ((!desc) || desc.get || desc.set) {
				// If the object is on the prototype or is a getter, skip it.
				continue;
			}
			// Recursively call deepFreeze.
			JSUtils.deepFreeze(desc.value);
		}
		return o;
	},

	// Deep-freeze `o`, leaving alone any property named in `ignoreFields`
	// (a map of propertyName -> true).
	deepFreezeButIgnore: function(o, ignoreFields) {
		for (var prop in o) {
			var desc = Object.getOwnPropertyDescriptor(o, prop);
			if (ignoreFields[prop] === true || (!desc) || desc.get || desc.set) {
				// Skip getters, primitives, and explicitly ignored fields.
				// BUG FIX: this used to `return`, which aborted the walk and
				// left the whole object unfrozen as soon as any ignored field
				// was encountered.
				continue;
			}
			o[prop] = JSUtils.deepFreeze(desc.value);
		}
		Object.freeze(o);
	},

	// Convert a counter to a Base64 encoded string.
	// Padding is stripped. \,+ are replaced with _,- respectively.
	// Warning: Max integer is 2^31 - 1 for bitwise operations.
	counterToBase64: function(n) {
		/* jshint bitwise: false */
		var arr = [];
		do {
			arr.unshift(n & 0xff);
			n >>= 8;
		} while (n > 0);
		// Buffer.from() replaces the deprecated `new Buffer()` constructor.
		return Buffer.from(arr)
			.toString("base64")
			.replace(/=/g, "")
			.replace(/\//g, "_")
			.replace(/\+/g, "-");
	},

	// Join pieces of regular expressions together.  This helps avoid
	// having to switch between string and regexp quoting rules, and
	// can also give you a poor-man's version of the "x" flag, ie:
	//  var re = rejoin( "(",
	//      /foo|bar/, "|",
	//      someRegExpFromAVariable
	//      ")", { flags: "i" } );
	// Note that this is basically string concatenation, except that
	// regular expressions are converted to strings using their `.source`
	// property, and then the final resulting string is converted to a
	// regular expression.
	// If the final argument is a regular expression, its flags will be
	// used for the result.  Alternatively, you can make the final argument
	// an object, with a `flags` property (as shown in the example above).
	rejoin: function() {
		var regexps = Array.prototype.slice.call(arguments);
		var last = lastItem(regexps);
		var flags;
		if (typeof (last) === 'object') {
			if (last instanceof RegExp) {
				flags = /\/([gimy]*)$/.exec(last.toString())[1];
			} else {
				flags = regexps.pop().flags;
			}
		}
		return new RegExp(regexps.reduce(function(acc, r) {
			return acc + (r instanceof RegExp ? r.source : r);
		}, ''), flags === undefined ? '' : flags);
	},

	// Append an array to an accumulator using the most efficient method
	// available. Makes sure that accumulation is O(n).
	pushArray: function push(accum, arr) {
		if (accum.length < arr.length) {
			return accum.concat(arr);
		} else {
			// big accum & arr
			for (var i = 0, l = arr.length; i < l; i++) {
				accum.push(arr[i]);
			}
			return accum;
		}
	},

	// Helper function to ease migration to Promise-based control flow
	// (aka, "after years of wandering, arrive in the Promise land").
	// This function allows retrofitting an existing callback-based
	// method to return an equivalent Promise, allowing enlightened
	// new code to omit the callback parameter and treat it as if
	// it had an API which simply returned a Promise for the result.
	//
	// Sample use:
	//   // callback is node-style: callback(err, value)
	//   function legacyApi(param1, param2, callback) {
	//     callback = JSUtils.mkPromised(callback); // THIS LINE IS NEW
	//     ... some implementation here...
	//     return callback.promise; // THIS LINE IS NEW
	//   }
	//   // old-style caller, still works:
	//   legacyApi(x, y, function(err, value) { ... });
	//   // new-style caller, such hotness:
	//   return legacyApi(x, y).then(function(value) { ... });
	//
	// The optional `names` parameter to `mkPromised` is the same
	// as the optional second argument to `Promise.promisify` in
	// https://github.com/cscott/prfun
	// It allows the use of `mkPromised` for legacy functions which
	// promise multiple results to their callbacks, eg:
	//   callback(err, body, response);  // from npm "request" module
	// For this callback signature, you have two options:
	// 1. Pass `true` as the names parameter:
	//      function legacyRequest(options, callback) {
	//        callback = JSUtils.mkPromised(callback, true);
	//        ... existing implementation...
	//        return callback.promise;
	//      }
	//    This resolves the promise with the array `[body, response]`, so
	//    a Promise-using caller looks like:
	//      return legacyRequest(options).then(function(r) {
	//        var body = r[0], response = r[1];
	//        ...
	//      }
	//    If you are using `prfun` then `Promise#spread` is convenient:
	//      return legacyRequest(options).spread(function(body, response) {
	//        ...
	//      });
	// 2. Alternatively (and probably preferably), provide an array of strings
	//    as the `names` parameter:
	//      function legacyRequest(options, callback) {
	//        callback = JSUtils.mkPromised(callback, ['body','response']);
	//        ... existing implementation...
	//        return callback.promise;
	//      }
	//    The resolved value will be an object with those fields:
	//      return legacyRequest(options).then(function(r) {
	//        var body = r.body, response = r.response;
	//        ...
	//      }
	// Note that in both cases the legacy callback behavior is unchanged:
	//   legacyRequest(options, function(err, body, response) { ... });
	//
	mkPromised: function(callback, names) {
		var res, rej;
		var p = new Promise(function(_res, _rej) { res = _res; rej = _rej; });
		var f = function(e, v) {
			if (e) {
				rej(e);
			} else if (names === true) {
				res(Array.prototype.slice.call(arguments, 1));
			} else if (names) {
				var value = {};
				for (var index in names) {
					value[names[index]] = arguments[(+index) + 1];
				}
				res(value);
			} else {
				res(v);
			}
			return callback && callback.apply(this, arguments);
		};
		f.promise = p;
		return f;
	},

	/**
	 * Determine whether two objects are identical, recursively.
	 */
	deepEquals: function(a, b) {
		var i;
		if (a === b) {
			// If only it were that simple.
			return true;
		}
		if (a === undefined || b === undefined ||
				a === null || b === null) {
			return false;
		}
		if (a.constructor !== b.constructor) {
			return false;
		}
		if (a instanceof Object) {
			for (i in a) {
				if (!this.deepEquals(a[i], b[i])) {
					return false;
				}
			}
			for (i in b) {
				if (a[i] === undefined) {
					return false;
				}
			}
			return true;
		}
		return false;
	},
};
// CommonJS export, guarded so the file can also be loaded in environments
// without a module system.
if (typeof module === "object") {
	module.exports.JSUtils = JSUtils;
}
|
codecodify/huaweiyun | openstack/bssintl/v1/bill/results.go | <reponame>codecodify/huaweiyun
package bill
import "github.com/gophercloud/gophercloud"
type QueryPartnerMonthlyBillsResp struct {
//Error code.
ErrorCode string `json:"error_code"`
//Error description
ErrorMsg string `json:"error_msg"`
//Billing cycle
BillCycle string `json:"billCycle"`
//Bill amount, which is calculated based on the special commercial discount of the partner.
CreditDebtAmount *float64 `json:"creditDebtAmount,omitempty"`
//Consumption amount, which is calculated based on the special commercial discount of the partner.
ConsumeAmount *float64 `json:"consumeAmount,omitempty"`
//Write-off amount (negative value), which is calculated based on the special commercial discount of the partner.
Writeoffdebt *float64 `json:"writeoffdebt,omitempty"`
//Unsubscription amount (negative value), which is calculated based on the special commercial discount of the partner.
unsubscribeAmount *float64 `json:"unsubscribeAmount,omitempty"`
//Unit
measureId *int `json:"measureId,omitempty"`
//This parameter is returned only when the query is successful.
Currency string `json:"currency"`
//Tax amount, which is the tax amount in the creditDebtAmount field.
TaxAmount *float64 `json:"taxAmount,omitempty"`
//Bill amount that is not settled, which is calculated based on the special commercial discount of the partner.
UnclearedAmount *float64 `json:"unclearedAmount,omitempty"`
//Due date for bills.
DueDate string `json:"dueDate"`
//Bill list.
BillList []PostpaidBillInfo `json:"billList"`
}
type PostpaidBillInfo struct {
//Bill type
BillType string `json:"billType"`
//Cloud service type code.
CloudServiceTypeCode string `json:"cloudServiceTypeCode"`
//Resource type code.
ResourceTypeCode string `json:"resourceTypeCode"`
//Billing mode.
PayMethod string `json:"payMethod"`
//Transaction amount/unsubscription amount/refund amount of the customer, including the vouchers, flexi-purchase coupons, reserved flexi-purchase coupons, and stored-value cards.
CreditDebtAmount *float64 `json:"creditDebtAmount"`
//Transaction amount/unsubscription amount/refund amount of the customer,not including the vouchers, flexi-purchase coupons, reserved flexi-purchase coupons, or stored-value cards.
CustomerAmountDue *float64 `json:"customerAmountDue"`
//Settlement product type.
SettlementType *int `json:"settlementType,omitempty"`
//Partner discount percentage
PartnerRatio *float64 `json:"partnerRatio,omitempty"`
//Amount that the partner needs to refund/Amount that the partner has refund
PartnerAmount *float64 `json:"partnerAmount,omitempty"`
//Yearly/monthly unit.
PeriodType *int `json:"periodType,omitempty"`
//Number of yearly/month periods.
PeriodNum *int `json:"periodNum,omitempty"`
//Product category code.
CategoryCode string `json:"categoryCode"`
}
// QueryMonthlyExpenditureSummaryResp is the response body of the "query
// monthly expenditure summary" API.
type QueryMonthlyExpenditureSummaryResp struct {
	// Error code
	ErrorCode string `json:"error_code"`
	// Error description.
	ErrorMsg string `json:"error_msg"`
	// Currency.
	Currency string `json:"currency"`
	// Number of the total records
	TotalCount *int `json:"total_count,omitempty"`
	// Record information
	BillSums []BillSumRecordInfo `json:"bill_sums"`
}

// BillSumRecordInfo is one summary record of QueryMonthlyExpenditureSummaryResp.
type BillSumRecordInfo struct {
	// Customer ID.
	CustomerId string `json:"customer_id"`
	// Resource type code
	ResourceTypeCode string `json:"resource_type_code"`
	// Cloud service region
	RegionCode string `json:"region_code"`
	// Cloud service type code
	CloudServiceTypeCode string `json:"cloud_service_type_code"`
	// Expenditure data collection period
	ConsumeTime string `json:"consume_time"`
	// Expenditure type
	PayMethod string `json:"pay_method"`
	// Consumption amount
	ConsumeAmount *float64 `json:"consume_amount,omitempty"`
	// Outstanding amount
	Debt *float64 `json:"debt,omitempty"`
	// Discounted amount
	Discount *float64 `json:"discount,omitempty"`
	// Unit
	MeasureId *int `json:"measure_id,omitempty"`
	// Expenditure type
	BillType *int `json:"bill_type,omitempty"`
	// Total payment amount distinguished by expenditure type and payment method of an account.
	AccountDetails []BalanceTypePay `json:"account_details"`
	// Discounted amount details
	DiscountDetailInfos []DiscountDetailInfo `json:"discount_detail_infos"`
	// Enterprise project ID
	// NOTE(review): this tag is camelCase while sibling fields use
	// snake_case — presumably mirrors the upstream API; confirm before
	// "fixing".
	EnterpriseProjectId string `json:"enterpriseProjectId"`
}

// BalanceTypePay breaks a payment down by account type.
type BalanceTypePay struct {
	// Account type
	BalanceTypeId string `json:"balance_type_id,omitempty"`
	// Deducted amount
	DeductAmount float64 `json:"deduct_amount,omitempty"`
}

// DiscountDetailInfo describes one applied discount.
type DiscountDetailInfo struct {
	// Discount type
	PromotionType string `json:"promotion_type"`
	// Discounted amount
	DiscountAmount *float64 `json:"discount_amount,omitempty"`
	// Discount type ID
	PromotionId string `json:"promotion_id"`
	// Unit
	MeasureId *int `json:"measure_id,omitempty"`
}
// QueryResourceUsageDetailsResp is the response body of the "query resource
// usage details" API.
type QueryResourceUsageDetailsResp struct {
	// Error code
	ErrorCode string `json:"error_code"`
	// Error description
	ErrorMsg string `json:"error_msg"`
	// Currency unit
	Currency string `json:"currency"`
	// Number of result sets
	TotalCount *int `json:"total_count,omitempty"`
	// Resource usage record
	MonthlyRecords []MonthlyRecord `json:"monthlyRecords"`
}

// MonthlyRecord is one per-resource monthly usage record.
type MonthlyRecord struct {
	// Cloud service type code
	CloudServiceTypeCode string `json:"cloudServiceTypeCode"`
	// Resource type code
	ResourceTypeCode string `json:"resourceTypeCode"`
	// Cloud service region code
	RegionCode string `json:"regionCode"`
	// Resource instance ID
	ResInstanceId string `json:"resInstanceId"`
	// Resource name
	ResourceName string `json:"resourceName"`
	// Resource tag
	ResourceTag string `json:"resourceTag"`
	// Consumption amount of a cloud service, including the amount of cash coupons.
	ConsumeAmount *float64 `json:"consumeAmount,omitempty"`
	// Expenditure month
	Cycle string `json:"cycle"`
	// Unit
	MeasureId *int `json:"measureId,omitempty"`
	// Enterprise project ID
	EnterpriseProjectId string `json:"enterpriseProjectId"`
	// Billing mode
	PayMethod string `json:"payMethod"`
}

// QueryResourceUsageRecordResp is the response body of the "query resource
// usage record" API.
type QueryResourceUsageRecordResp struct {
	// Error code
	ErrorCode string `json:"error_code"`
	// Error description
	ErrorMsg string `json:"error_msg"`
	// Currency unit.
	Currency string `json:"currency"`
	// Number of result sets
	TotalCount *int `json:"totalCount,omitempty"`
	// Resource usage record
	FeeRecords []ResFeeRecord `json:"feeRecords"`
}

// ResFeeRecord is one fee record of QueryResourceUsageRecordResp.
type ResFeeRecord struct {
	// Fee generation time
	CreateTime string `json:"createTime"`
	// Start time of using the resource corresponding to the fee.
	EffectiveTime string `json:"effectiveTime"`
	// End time of using the resource corresponding to the fee
	ExpireTime string `json:"expireTime"`
	// Fee record serial number
	FeeId string `json:"feeId"`
	// Product ID
	ProductId string `json:"productId"`
	// Product name
	ProductName string `json:"productName"`
	// Order ID
	OrderId string `json:"orderId"`
	// Consumption amount, including the amount of cash coupons.
	Amount *float64 `json:"amount,omitempty"`
	// Unit
	MeasureId *int `json:"measureId,omitempty"`
	// Usage
	UsageAmount *float64 `json:"usageAmount,omitempty"`
	// Usage unit
	UsageMeasureId *int `json:"usageMeasureId,omitempty"`
	// Package usage.
	FreeResourceAmount *float64 `json:"freeResourceAmount,omitempty"`
	// Unit (package usage)
	FreeResourceMeasureId *int `json:"freeResourceMeasureId,omitempty"`
	// Cloud service type code
	CloudServiceTypeCode string `json:"cloudServiceTypeCode"`
	// Resource type code
	ResourceTypeCode string `json:"resourceTypeCode"`
	// Cloud service region code
	RegionCode string `json:"regionCode"`
	// Payment method
	PayMethod string `json:"payMethod"`
	// Project ID.
	ProjectID string `json:"projectID"`
	// Project name.
	ProjectName string `json:"projectName"`
	// Resource tag
	ResourceTag string `json:"resourceTag"`
	// Resource name
	ResourceName string `json:"resourceName"`
	// Resource ID.
	ResourceId string `json:"resourceId"`
	// Expenditure type
	FeeSourceOperation *int `json:"feeSourceOperation,omitempty"`
	// Enterprise project ID.
	EnterpriseProjectId string `json:"enterpriseProjectId"`
	// Period type
	PeriodType string `json:"periodType"`
	// Spot instance ID
	Spot string `json:"spot"`
	// Reserved instance usage
	RIAmount *float64 `json:"rIAmount,omitempty"`
	// Unit (reserved instance usage)
	RIMeasureId *int `json:"rIMeasureId,omitempty"`
}
// QueryPartnerMonthlyBillsResult wraps the raw HTTP result of a
// "query partner monthly bills" request.
type QueryPartnerMonthlyBillsResult struct {
	gophercloud.Result
}

// Extract deserializes the response body into a QueryPartnerMonthlyBillsResp.
func (r QueryPartnerMonthlyBillsResult) Extract() (*QueryPartnerMonthlyBillsResp, error) {
	var parsed *QueryPartnerMonthlyBillsResp
	err := r.ExtractInto(&parsed)
	return parsed, err
}

// QueryMonthlyExpenditureSummaryResult wraps the raw HTTP result of a
// "query monthly expenditure summary" request.
type QueryMonthlyExpenditureSummaryResult struct {
	gophercloud.Result
}

// Extract deserializes the response body into a QueryMonthlyExpenditureSummaryResp.
func (r QueryMonthlyExpenditureSummaryResult) Extract() (*QueryMonthlyExpenditureSummaryResp, error) {
	var parsed *QueryMonthlyExpenditureSummaryResp
	err := r.ExtractInto(&parsed)
	return parsed, err
}

// QueryResourceUsageDetailsResult wraps the raw HTTP result of a
// "query resource usage details" request.
type QueryResourceUsageDetailsResult struct {
	gophercloud.Result
}

// Extract deserializes the response body into a QueryResourceUsageDetailsResp.
func (r QueryResourceUsageDetailsResult) Extract() (*QueryResourceUsageDetailsResp, error) {
	var parsed *QueryResourceUsageDetailsResp
	err := r.ExtractInto(&parsed)
	return parsed, err
}

// QueryResourceUsageRecordResult wraps the raw HTTP result of a
// "query resource usage record" request.
type QueryResourceUsageRecordResult struct {
	gophercloud.Result
}

// Extract deserializes the response body into a QueryResourceUsageRecordResp.
func (r QueryResourceUsageRecordResult) Extract() (*QueryResourceUsageRecordResp, error) {
	var parsed *QueryResourceUsageRecordResp
	err := r.ExtractInto(&parsed)
	return parsed, err
}
|
Andrew-M-C/Linux-Linux_Programming_Template | history/150508_libevServer/AMCLibevTypes.h | /*******************************************************************************
Copyright (C), 2011-2015, <NAME>
File name: AMCLibevTypes.h
Description:
This file provide data structures for AMC libev tools.
Author: <NAME> (<NAME>)
History:
2015-05-08: File created
--------------------------------------------------------------
Copyright information:
This file was intended to be under GPL protocol. However, I may use this library
in my working as I am an employee. And my company may require me to keep it se-
cret. Therefore, this file is neither open source nor under GPL control.
********************************************************************************/
#ifndef _AMC_LIBEV_TYPES_H
#define _AMC_LIBEV_TYPES_H

#include <stdlib.h>
#include "ev.h"

/* Fallback for environments whose headers do not define NULL. */
#ifndef NULL
#define NULL ((void*)0)
#endif

/*
 * Each AMC_ev_* struct wraps the corresponding libev watcher and attaches an
 * opaque user argument (`arg` of `argLen` bytes).  The watcher member is
 * declared first — presumably so a pointer to the wrapper can be passed to
 * libev as a plain watcher and recovered in callbacks by a simple cast;
 * confirm against the callers in the AMC libev tools.
 */

/* I/O readiness watcher plus user payload. */
struct AMC_ev_io {
	struct ev_io watcher;
	void *arg;
	size_t argLen;
};

/* Relative timer watcher plus user payload. */
struct AMC_ev_timer {
	struct ev_timer watcher;
	void *arg;
	size_t argLen;
};

/* Absolute/periodic timer watcher plus user payload. */
struct AMC_ev_periodic {
	struct ev_periodic watcher;
	void *arg;
	size_t argLen;
};

/* Signal watcher plus user payload. */
struct AMC_ev_signal {
	struct ev_signal watcher;
	void *arg;
	size_t argLen;
};

/* Child-process watcher plus user payload. */
struct AMC_ev_child {
	struct ev_child watcher;
	void *arg;
	size_t argLen;
};

/* File-attribute (stat) watcher plus user payload. */
struct AMC_ev_stat {
	struct ev_stat watcher;
	void *arg;
	size_t argLen;
};

/* Idle watcher plus user payload. */
struct AMC_ev_idle {
	struct ev_idle watcher;
	void *arg;
	size_t argLen;
};

/* Prepare (before-poll) watcher plus user payload. */
struct AMC_ev_prepare {
	struct ev_prepare watcher;
	void *arg;
	size_t argLen;
};

/* Check (after-poll) watcher plus user payload. */
struct AMC_ev_check {
	struct ev_check watcher;
	void *arg;
	size_t argLen;
};

/* Fork watcher plus user payload. */
struct AMC_ev_fork {
	struct ev_fork watcher;
	void *arg;
	size_t argLen;
};

/* Loop-cleanup watcher plus user payload. */
struct AMC_ev_cleanup {
	struct ev_cleanup watcher;
	void *arg;
	size_t argLen;
};

/* Embedded-loop watcher plus user payload. */
struct AMC_ev_embed {
	struct ev_embed watcher;
	void *arg;
	size_t argLen;
};

/* Async (cross-thread wakeup) watcher plus user payload. */
struct AMC_ev_async {
	struct ev_async watcher;
	void *arg;
	size_t argLen;
};

#endif /* End of file */
|
xutao6936/archYBT | src/main/java/com/topcheer/ybt/system/service/ITopSequenceService.java | package com.topcheer.ybt.system.service;
/**
 * Service for obtaining values from a database sequence.
 */
public interface ITopSequenceService {

    /**
     * Returns a sequence value as a String.
     * <p>
     * NOTE(review): despite the name "getSequenceByName", this method takes
     * no name parameter — presumably the implementation uses a fixed/default
     * sequence; confirm against the implementing class.
     *
     * @return the sequence value
     */
    public String getSequenceByName();
}
|
byxorna/nycmesh-tool | generated/go/uisp/client/devices/get_devices_erouters_id_router_routes_responses.go | // Code generated by go-swagger; DO NOT EDIT.
package devices
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/byxorna/nycmesh-tool/generated/go/uisp/models"
)
// GetDevicesEroutersIDRouterRoutesReader is a Reader for the GetDevicesEroutersIDRouterRoutes structure.
// NOTE(review): this file is generated by go-swagger ("DO NOT EDIT") —
// regenerate from the spec rather than hand-patching.
type GetDevicesEroutersIDRouterRoutesReader struct {
	formats strfmt.Registry
}

// ReadResponse reads a server response into the received o.
// On 200 it returns the populated success result; for the documented error
// codes it returns the typed error value; any other status yields a generic
// runtime.APIError.
func (o *GetDevicesEroutersIDRouterRoutesReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
	switch response.Code() {
	case 200:
		result := NewGetDevicesEroutersIDRouterRoutesOK()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return result, nil
	case 400:
		result := NewGetDevicesEroutersIDRouterRoutesBadRequest()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 401:
		result := NewGetDevicesEroutersIDRouterRoutesUnauthorized()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 403:
		result := NewGetDevicesEroutersIDRouterRoutesForbidden()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 404:
		result := NewGetDevicesEroutersIDRouterRoutesNotFound()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	case 500:
		result := NewGetDevicesEroutersIDRouterRoutesInternalServerError()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return nil, result
	default:
		return nil, runtime.NewAPIError("response status code does not match any response statuses defined for this endpoint in the swagger spec", response, response.Code())
	}
}
// NewGetDevicesEroutersIDRouterRoutesOK creates a GetDevicesEroutersIDRouterRoutesOK with default headers values
// NOTE(review): generated by go-swagger ("DO NOT EDIT") — regenerate from the
// spec rather than hand-patching.
func NewGetDevicesEroutersIDRouterRoutesOK() *GetDevicesEroutersIDRouterRoutesOK {
	return &GetDevicesEroutersIDRouterRoutesOK{}
}

/* GetDevicesEroutersIDRouterRoutesOK describes a response with status code 200, with default header values.

Successful
*/
type GetDevicesEroutersIDRouterRoutesOK struct {
	Payload models.ListOfEdgeRouterRoutes
}

func (o *GetDevicesEroutersIDRouterRoutesOK) Error() string {
	return fmt.Sprintf("[GET /devices/erouters/{id}/router/routes][%d] getDevicesEroutersIdRouterRoutesOK %+v", 200, o.Payload)
}
func (o *GetDevicesEroutersIDRouterRoutesOK) GetPayload() models.ListOfEdgeRouterRoutes {
	return o.Payload
}

func (o *GetDevicesEroutersIDRouterRoutesOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	// response payload
	if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}

// NewGetDevicesEroutersIDRouterRoutesBadRequest creates a GetDevicesEroutersIDRouterRoutesBadRequest with default headers values
func NewGetDevicesEroutersIDRouterRoutesBadRequest() *GetDevicesEroutersIDRouterRoutesBadRequest {
	return &GetDevicesEroutersIDRouterRoutesBadRequest{}
}

/* GetDevicesEroutersIDRouterRoutesBadRequest describes a response with status code 400, with default header values.

Bad Request
*/
type GetDevicesEroutersIDRouterRoutesBadRequest struct {
	Payload *models.Error
}

func (o *GetDevicesEroutersIDRouterRoutesBadRequest) Error() string {
	return fmt.Sprintf("[GET /devices/erouters/{id}/router/routes][%d] getDevicesEroutersIdRouterRoutesBadRequest %+v", 400, o.Payload)
}
func (o *GetDevicesEroutersIDRouterRoutesBadRequest) GetPayload() *models.Error {
	return o.Payload
}

func (o *GetDevicesEroutersIDRouterRoutesBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.Error)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}

// NewGetDevicesEroutersIDRouterRoutesUnauthorized creates a GetDevicesEroutersIDRouterRoutesUnauthorized with default headers values
func NewGetDevicesEroutersIDRouterRoutesUnauthorized() *GetDevicesEroutersIDRouterRoutesUnauthorized {
	return &GetDevicesEroutersIDRouterRoutesUnauthorized{}
}

/* GetDevicesEroutersIDRouterRoutesUnauthorized describes a response with status code 401, with default header values.

Unauthorized
*/
type GetDevicesEroutersIDRouterRoutesUnauthorized struct {
	Payload *models.Error
}

func (o *GetDevicesEroutersIDRouterRoutesUnauthorized) Error() string {
	return fmt.Sprintf("[GET /devices/erouters/{id}/router/routes][%d] getDevicesEroutersIdRouterRoutesUnauthorized %+v", 401, o.Payload)
}
func (o *GetDevicesEroutersIDRouterRoutesUnauthorized) GetPayload() *models.Error {
	return o.Payload
}

func (o *GetDevicesEroutersIDRouterRoutesUnauthorized) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.Error)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}

// NewGetDevicesEroutersIDRouterRoutesForbidden creates a GetDevicesEroutersIDRouterRoutesForbidden with default headers values
func NewGetDevicesEroutersIDRouterRoutesForbidden() *GetDevicesEroutersIDRouterRoutesForbidden {
	return &GetDevicesEroutersIDRouterRoutesForbidden{}
}

/* GetDevicesEroutersIDRouterRoutesForbidden describes a response with status code 403, with default header values.

Forbidden
*/
type GetDevicesEroutersIDRouterRoutesForbidden struct {
	Payload *models.Error
}

func (o *GetDevicesEroutersIDRouterRoutesForbidden) Error() string {
	return fmt.Sprintf("[GET /devices/erouters/{id}/router/routes][%d] getDevicesEroutersIdRouterRoutesForbidden %+v", 403, o.Payload)
}
func (o *GetDevicesEroutersIDRouterRoutesForbidden) GetPayload() *models.Error {
	return o.Payload
}

func (o *GetDevicesEroutersIDRouterRoutesForbidden) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.Error)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}

// NewGetDevicesEroutersIDRouterRoutesNotFound creates a GetDevicesEroutersIDRouterRoutesNotFound with default headers values
func NewGetDevicesEroutersIDRouterRoutesNotFound() *GetDevicesEroutersIDRouterRoutesNotFound {
	return &GetDevicesEroutersIDRouterRoutesNotFound{}
}

/* GetDevicesEroutersIDRouterRoutesNotFound describes a response with status code 404, with default header values.

Not Found
*/
type GetDevicesEroutersIDRouterRoutesNotFound struct {
	Payload *models.Error
}

func (o *GetDevicesEroutersIDRouterRoutesNotFound) Error() string {
	return fmt.Sprintf("[GET /devices/erouters/{id}/router/routes][%d] getDevicesEroutersIdRouterRoutesNotFound %+v", 404, o.Payload)
}
func (o *GetDevicesEroutersIDRouterRoutesNotFound) GetPayload() *models.Error {
	return o.Payload
}

func (o *GetDevicesEroutersIDRouterRoutesNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.Error)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}

// NewGetDevicesEroutersIDRouterRoutesInternalServerError creates a GetDevicesEroutersIDRouterRoutesInternalServerError with default headers values
func NewGetDevicesEroutersIDRouterRoutesInternalServerError() *GetDevicesEroutersIDRouterRoutesInternalServerError {
	return &GetDevicesEroutersIDRouterRoutesInternalServerError{}
}

/* GetDevicesEroutersIDRouterRoutesInternalServerError describes a response with status code 500, with default header values.

Internal Server Error
*/
type GetDevicesEroutersIDRouterRoutesInternalServerError struct {
	Payload *models.Error
}

func (o *GetDevicesEroutersIDRouterRoutesInternalServerError) Error() string {
	return fmt.Sprintf("[GET /devices/erouters/{id}/router/routes][%d] getDevicesEroutersIdRouterRoutesInternalServerError %+v", 500, o.Payload)
}
func (o *GetDevicesEroutersIDRouterRoutesInternalServerError) GetPayload() *models.Error {
	return o.Payload
}

func (o *GetDevicesEroutersIDRouterRoutesInternalServerError) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.Error)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
|
jo3-l/cp-practice | src/main/cpp/ccc/2000/surfing.cpp | <filename>src/main/cpp/ccc/2000/surfing.cpp
#include <bits/stdc++.h>
using namespace std;

// page name -> outgoing links, filled while the input pages are parsed.
unordered_map<string, vector<string>> adj;

int main() {
    ios_base::sync_with_stdio(false);
    cin.tie(nullptr);
    cout.tie(nullptr);

    // Phase 1: parse each page, collect and echo every hyperlink.
    int pages;
    cin >> pages;
    for (int i = 0; i < pages; i++) {
        string name;
        cin >> name >> ws;
        string line;
        for (getline(cin, line); line != "</HTML>"; getline(cin, line)) {
            size_t at = line.find("<A HREF=\"");
            while (at != string::npos) {
                size_t start = at + 9;                 // skip the <A HREF=" prefix
                size_t close = line.find('"', start);  // closing quote of the URL
                string target = line.substr(start, close - start);
                adj[name].push_back(target);
                cout << "Link from " << name << " to " << target << '\n';
                // Resume scanning past the closing `">` of this anchor.
                at = line.find("<A HREF=\"", close + 2);
            }
        }
    }

    // Phase 2: answer reachability queries with a BFS over the link graph.
    // The query list is terminated by the word "The".
    string src;
    for (cin >> src; src != "The"; cin >> src) {
        string dst;
        cin >> dst;
        unordered_set<string> visited{src};
        queue<string> frontier;
        frontier.push(src);
        bool reachable = false;
        while (!frontier.empty() && !reachable) {
            string page = frontier.front();
            frontier.pop();
            for (const string& next_page : adj[page]) {
                if (next_page == dst) {
                    reachable = true;
                    break;
                }
                if (visited.insert(next_page).second) frontier.push(next_page);
            }
        }
        cout << (reachable ? "Can surf" : "Can't surf") << " from " << src << " to " << dst << '.' << '\n';
    }
    return 0;
}
v2dev/active-client-sdk-objc | client-library/PFTargetRelationship.h | <gh_stars>0
//
// PFTargetRelationship.h
// Percero
//
// Created by <NAME> on 3/20/13.
//
//
#import <Foundation/Foundation.h>
#import "PFRelationship.h"
/// Describes the "target" end of a relationship between model classes:
/// which class sits on the other side, which of its properties points
/// back, and whether the link is required and/or a collection.
@interface PFTargetRelationship : PFRelationship

/// Builds a one-way relationship: only the inverse side is described.
/// NOTE(review): return type is implicitly `id`, matching the original API.
- initUnidirectionalWithInverseClassName:(NSString *) inverseClassName
                     inversePropertyName:(NSString *) inversePropertyName
                              isRequired:(BOOL) isRequired
                            isCollection:(BOOL) isCollection;

/// Builds a two-way relationship; `propertyName` names the property on
/// this side that mirrors the inverse property.
- initBidirectionalWithPropertyName:(NSString *) propertyName
                   inverseClassName:(NSString *) inverseClassName
                inversePropertyName:(NSString *) inversePropertyName
                         isRequired:(BOOL) isRequired
                       isCollection:(BOOL) isCollection;

@end
|
yoonchunjoo/pinpoint | profiler/src/main/java/com/navercorp/pinpoint/profiler/modifier/db/mysql/MySQLPreparedStatementJDBC4Modifier.java | /*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.modifier.db.mysql;
import com.navercorp.pinpoint.bootstrap.Agent;
import com.navercorp.pinpoint.bootstrap.instrument.ByteCodeInstrumentor;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException;
import com.navercorp.pinpoint.bootstrap.instrument.NotFoundInstrumentException;
import com.navercorp.pinpoint.bootstrap.instrument.Scope;
import com.navercorp.pinpoint.bootstrap.instrument.Type;
import com.navercorp.pinpoint.bootstrap.interceptor.Interceptor;
import com.navercorp.pinpoint.profiler.interceptor.ScopeDelegateStaticInterceptor;
import com.navercorp.pinpoint.profiler.modifier.AbstractModifier;
import com.navercorp.pinpoint.profiler.modifier.db.interceptor.PreparedStatementBindVariableInterceptor;
import com.navercorp.pinpoint.profiler.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Method;
import java.security.ProtectionDomain;
import java.util.Arrays;
import java.util.List;
/**
* @author emeroad
*/
/**
 * Bytecode modifier that instruments MySQL's JDBC4 {@code PreparedStatement}
 * so that bind-variable setter calls ({@code setString}, {@code setInt}, ...)
 * are intercepted for tracing.
 *
 * @author emeroad
 */
public class MySQLPreparedStatementJDBC4Modifier extends AbstractModifier {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    public MySQLPreparedStatementJDBC4Modifier(ByteCodeInstrumentor byteCodeInstrumentor, Agent agent) {
        super(byteCodeInstrumentor, agent);
    }

    /**
     * @return internal (slash-separated) name of the class to instrument
     */
    public String getTargetClass() {
        return "com/mysql/jdbc/JDBC4PreparedStatement";
    }

    /**
     * Rewrites the target class so its bind-variable setters are intercepted.
     *
     * @return the modified bytecode, or {@code null} if instrumentation
     *         failed (the original class is then loaded unmodified)
     */
    @Override
    public byte[] modify(ClassLoader classLoader, String className, ProtectionDomain protectedDomain, byte[] classFileBuffer) {
        if (logger.isInfoEnabled()) {
            // Fixed log message typo: "Modifing" -> "Modifying".
            logger.info("Modifying. {}", className);
        }
        try {
            InstrumentClass preparedStatement = byteCodeInstrumentor.getClass(classLoader, className, classFileBuffer);
            bindVariableIntercept(preparedStatement, classLoader, protectedDomain);
            return preparedStatement.toBytecode();
        } catch (InstrumentException e) {
            if (logger.isWarnEnabled()) {
                logger.warn("{} modify fail. Cause:{}", this.getClass().getSimpleName(), e.getMessage(), e);
            }
            return null;
        }
    }

    /**
     * Adds a single, scope-guarded interceptor instance to every setXxx()
     * bind-variable method found on the prepared statement class.
     */
    private void bindVariableIntercept(InstrumentClass preparedStatement, ClassLoader classLoader, ProtectionDomain protectedDomain) throws InstrumentException {
        // TODO Need to add parameter type to filter arguments
        // Cannot specify methods without parameter type information because each JDBC driver has different API.
        BindVariableFilter exclude = new IncludeBindVariableFilter(new String[]{"setRowId", "setNClob", "setSQLXML"});
        List<Method> bindMethod = PreparedStatementUtils.findBindVariableSetMethod(exclude);
        // TODO Do we have to utilize this logic?
        // It would be better to create util api in bci package which adds interceptors to multiple methods.
        final Scope scope = byteCodeInstrumentor.getScope(MYSQLScope.SCOPE_NAME);
        Interceptor interceptor = new ScopeDelegateStaticInterceptor(new PreparedStatementBindVariableInterceptor(), scope);
        int interceptorId = -1;
        for (Method method : bindMethod) {
            String methodName = method.getName();
            String[] parameterType = JavaAssistUtils.getParameterType(method.getParameterTypes());
            try {
                if (interceptorId == -1) {
                    // First matching method registers the interceptor...
                    interceptorId = preparedStatement.addInterceptor(methodName, parameterType, interceptor, Type.after);
                } else {
                    // ...subsequent methods reuse the same instance by id.
                    preparedStatement.reuseInterceptor(methodName, parameterType, interceptorId, Type.after);
                }
            } catch (NotFoundInstrumentException e) {
                // Cannot find bind variable setter method. This is not an error. logging will be enough.
                // Did not log stack trace intentionally
                if (logger.isDebugEnabled()) {
                    logger.debug("bindVariable api not found. method:{} param:{} Cause:{}", methodName, Arrays.toString(parameterType), e.getMessage());
                }
            }
        }
    }
}
|
amantewary/Scrawl | app/src/main/java/com/example/amantewary/scrawl/Handlers/UserClass.java | <gh_stars>1-10
package com.example.amantewary.scrawl.Handlers;
import com.google.gson.annotations.SerializedName;
/**
 * Gson-mapped DTO carrying a user record (and optional error fields)
 * exchanged with the backend API.
 */
public class UserClass {

    @SerializedName("password")
    String password;
    @SerializedName("email")
    String email;
    @SerializedName("username")
    String username;
    @SerializedName("error")
    String error;
    @SerializedName("error_msg")
    String error_msg;
    @SerializedName("userId")
    Integer userId;

    /**
     * Creates a user holding only the identity fields; password and the
     * error fields stay unset.
     */
    public UserClass(String email, String username, Integer userId) {
        this.email = email;
        this.username = username;
        this.userId = userId;
    }

    // --- identity ---------------------------------------------------------

    public Integer getUserId() {
        return userId;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    // --- credentials ------------------------------------------------------

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    // --- server-reported errors (read-only) -------------------------------

    public String getError() {
        return error;
    }

    public String getError_msg() {
        return error_msg;
    }
}
|
tefra/xsdata-w3c-tests | output/models/ms_data/regex/re_k16_xsd/__init__.py | from output.models.ms_data.regex.re_k16_xsd.re_k16 import (
Regex,
Doc,
)
__all__ = [
"Regex",
"Doc",
]
|
nemith/cxgo | runtime/stdio/format_test.go | <gh_stars>100-1000
package stdio
import (
"testing"
"github.com/stretchr/testify/require"
)
// TestFormat exercises parseFormat: a C printf-style format string is
// split into words, where every "%"-verb (including the literal "%%")
// becomes its own word with Verb set, and runs of plain text are kept
// as single non-verb words.
func TestFormat(t *testing.T) {
	cases := []struct {
		name   string
		format string
		words  []formatWord
	}{
		{
			name:   "plain",
			format: `some string here`,
			words: []formatWord{
				{Str: "some string here"},
			},
		},
		{
			name:   "string",
			format: `%s`,
			words: []formatWord{
				{Str: "%s", Verb: true},
			},
		},
		{
			name:   "wstring",
			format: `%S`,
			words: []formatWord{
				{Str: "%S", Verb: true},
			},
		},
		{
			name:   "int",
			format: `%d`,
			words: []formatWord{
				{Str: "%d", Verb: true},
			},
		},
		// A character repeated right after a verb must not be absorbed
		// into the verb itself.
		{
			name:   "verb repeat",
			format: `%dd`,
			words: []formatWord{
				{Str: "%d", Verb: true},
				{Str: "d"},
			},
		},
		{
			name:   "mixed start",
			format: `%d num`,
			words: []formatWord{
				{Str: "%d", Verb: true},
				{Str: " num"},
			},
		},
		{
			name:   "mixed start 2",
			format: `%dnum`,
			words: []formatWord{
				{Str: "%d", Verb: true},
				{Str: "num"},
			},
		},
		{
			name:   "mixed middle",
			format: `v = %d num`,
			words: []formatWord{
				{Str: "v = "},
				{Str: "%d", Verb: true},
				{Str: " num"},
			},
		},
		{
			name:   "mixed middle 2",
			format: `v=%dnum`,
			words: []formatWord{
				{Str: "v="},
				{Str: "%d", Verb: true},
				{Str: "num"},
			},
		},
		{
			name:   "mixed end",
			format: `v = %d`,
			words: []formatWord{
				{Str: "v = "},
				{Str: "%d", Verb: true},
			},
		},
		{
			name:   "mixed end 2",
			format: `v=%d`,
			words: []formatWord{
				{Str: "v="},
				{Str: "%d", Verb: true},
			},
		},
		// "%%" is treated as a verb word of its own, never merged with
		// adjacent text or verbs.
		{
			name:   "percents",
			format: `%d%% = %%%d %%`,
			words: []formatWord{
				{Str: "%d", Verb: true},
				{Str: "%%", Verb: true},
				{Str: " = "},
				{Str: "%%", Verb: true},
				{Str: "%d", Verb: true},
				{Str: " "},
				{Str: "%%", Verb: true},
			},
		},
		{
			name:   "percents 2",
			format: `%%%d = %d%% `,
			words: []formatWord{
				{Str: "%%", Verb: true},
				{Str: "%d", Verb: true},
				{Str: " = "},
				{Str: "%d", Verb: true},
				{Str: "%%", Verb: true},
				{Str: " "},
			},
		},
		// Kitchen-sink case: length modifiers, widths, precision, flags
		// and '*' widths must all stay attached to their verb.
		{
			name:   "all types",
			format: "%%%c%d%ld%10d%010d%x%o%#x%#o%4.2f%+.0e%E%*d%s%S%%",
			words: []formatWord{
				{Str: "%%", Verb: true},
				{Str: "%c", Verb: true},
				{Str: "%d", Verb: true},
				{Str: "%ld", Verb: true},
				{Str: "%10d", Verb: true},
				{Str: "%010d", Verb: true},
				{Str: "%x", Verb: true},
				{Str: "%o", Verb: true},
				{Str: "%#x", Verb: true},
				{Str: "%#o", Verb: true},
				{Str: "%4.2f", Verb: true},
				{Str: "%+.0e", Verb: true},
				{Str: "%E", Verb: true},
				{Str: "%*d", Verb: true},
				{Str: "%s", Verb: true},
				{Str: "%S", Verb: true},
				{Str: "%%", Verb: true},
			},
		},
	}
	// Each case runs as a named subtest so a failure pinpoints the format.
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			words := parseFormat(c.format)
			require.Equal(t, c.words, words)
		})
	}
}
|
nick87ds/MaterialeSerate | PercorsoDati/Lab3-4/etl/transform.py | <filename>PercorsoDati/Lab3-4/etl/transform.py<gh_stars>10-100
# Second phase of ETL: Transformations on the data
import logging
import pandas as pd
from config import ETL_EXTRACTION_CONFIG
from etl.utils import remove_dollar
from etl.extract import import_data
def tf_listings(filename: str, dataframe: pd.DataFrame) -> pd.DataFrame:
    """Transform the raw listings table.

    Keeps only the columns used downstream and strips the dollar
    formatting from ``price`` so it becomes numeric.

    :param filename: name of the source table (unused, kept so every
        transform shares the same signature).
    :param dataframe: raw listings data.
    :return: the reduced and cleaned listings DataFrame.
    """
    # Keep only some columns
    listings = dataframe[
        [
            "id",
            "name",
            "longitude",
            "latitude",
            "listing_url",
            "instant_bookable",
            "host_response_time",
            "review_scores_rating",
            "property_type",
            "room_type",
            "accommodates",
            "bathrooms",
            "bedrooms",
            "beds",
            "reviews_per_month",
            "amenities",
            "number_of_reviews",
            "price",
        ]
    ]
    # Remove the dollars
    listings = listings.assign(price=remove_dollar(listings.price))
    # (removed dead code: a bare `listings[["price"]]` expression that
    # selected a column and immediately discarded the result)
    return listings
def tf_reviews(filename: str, dataframe: pd.DataFrame) -> pd.DataFrame:
    """Transform the raw reviews table.

    Parses the ``date`` column, derives ``year``/``month`` columns and
    sorts the rows newest-first.

    :param filename: name of the source table (unused, kept so every
        transform shares the same signature).
    :param dataframe: raw reviews data.
    :return: the cleaned reviews DataFrame.
    """
    out = dataframe.copy()
    out["date"] = pd.to_datetime(out["date"])
    out["year"] = out["date"].dt.year
    out["month"] = out["date"].dt.month
    return out.sort_values(by=["year", "month"], ascending=False)
def tf_calendar(filename: str, dataframe: pd.DataFrame):
    """Transform the raw calendar table.

    Parses dates, converts the dollar-formatted price to a number,
    derives ``year``/``month`` columns, sorts newest-first and maps the
    availability flag ("t"/"f") to a boolean.

    :param filename: name of the source table (unused, kept so every
        transform shares the same signature).
    :param dataframe: raw calendar data.
    :return: the cleaned calendar DataFrame.
    """
    # Create date from datetime
    calendar = dataframe.assign(date=pd.to_datetime(dataframe["date"]))
    # Strip the "$" and thousands separators before the numeric cast.
    # regex=False makes the replacement literal on every pandas version:
    # before pandas 1.4 the default was regex=True, where "$" anchors at
    # end-of-string and the dollar sign was never actually removed.
    calendar = calendar.assign(
        price=pd.to_numeric(
            calendar.price.str.replace("$", "", regex=False)
            .str.replace(",", "", regex=False)
        ),
    )
    # Define year and month
    calendar["year"] = calendar["date"].dt.year
    calendar["month"] = calendar["date"].dt.month
    calendar = calendar.sort_values(["year", "month"], ascending=False)
    # Map logic values
    calendar["available"] = calendar.available.map({"t": True, "f": False})
    return calendar
def transform(dataframes: dict) -> pd.DataFrame:
    """Run the per-table transformations of the ETL pipeline.

    Expects ``dataframes`` to map, in order, the listings, reviews and
    calendar table names to their raw DataFrames; returns a dict with
    the same keys mapped to the transformed DataFrames.  (Despite the
    annotation, the returned value is a dict of DataFrames.)
    """
    logging.info("Start transformations")
    if dataframes is None:
        # The caller gave us nothing: re-run the extraction step.
        logging.info("Input dataset not loaded, try to reload it")
        dataframes = import_data(remote=False, config=ETL_EXTRACTION_CONFIG)
    # The insertion order of the dict fixes which table is which.
    items = list(dataframes.items())
    listings_name, listings_raw = items[0]
    reviews_name, reviews_raw = items[1]
    calendar_name, calendar_raw = items[2]
    logging.info(f"Start transforming {listings_name} file")
    listings = tf_listings(listings_name, listings_raw)
    logging.info(f"Start transforming {reviews_name} file")
    reviews = tf_reviews(reviews_name, reviews_raw)
    logging.info(f"Start transforming {calendar_name} file")
    calendar = tf_calendar(calendar_name, calendar_raw)
    return {
        listings_name: listings,
        reviews_name: reviews,
        calendar_name: calendar,
    }
jokva/cosmopolitan | libc/log/appendresourcereport.c | <filename>libc/log/appendresourcereport.c
/*-*- mode:c;indent-tabs-mode:nil;c-basic-offset:2;tab-width:8;coding:utf-8 -*-│
│vi: set net ft=c ts=2 sts=2 sw=2 fenc=utf-8 :vi│
╞══════════════════════════════════════════════════════════════════════════════╡
│ Copyright 2021 <NAME> │
│ │
│ Permission to use, copy, modify, and/or distribute this software for │
│ any purpose with or without fee is hereby granted, provided that the │
│ above copyright notice and this permission notice appear in all copies. │
│ │
│ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL │
│ WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED │
│ WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE │
│ AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL │
│ DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR │
│ PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER │
│ TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR │
│ PERFORMANCE OF THIS SOFTWARE. │
╚─────────────────────────────────────────────────────────────────────────────*/
#include "libc/calls/struct/rusage.h"
#include "libc/fmt/itoa.h"
#include "libc/log/log.h"
#include "libc/math.h"
#include "libc/runtime/clktck.h"
#include "libc/stdio/append.internal.h"
/* Appends a 64-bit integer to *b, formatted with thousands separators. */
static void AppendInt(char **b, int64_t x) {
  char buf[27], *e;
  e = FormatInt64Thousands(buf, x);
  appendd(b, buf, e - buf);
}
/* Appends "<s1><x><s2><nl>" to *b, i.e. one labeled metric line. */
static void AppendMetric(char **b, const char *s1, int64_t x, const char *s2,
                         const char *nl) {
  appends(b, s1);
  AppendInt(b, x);
  appends(b, s2);
  appends(b, nl);
}
/*
 * Appends "<x> <s>" to *b, pluralizing the unit name when the count
 * is not exactly one, e.g. "1 signal" vs. "2 signals".
 *
 * Fix: the pluralization test was inverted (`x == 1`), producing
 * "1 signals" and "2 signal"; upstream uses `x != 1`.
 */
static void AppendUnit(char **b, int64_t x, const char *s) {
  AppendInt(b, x);
  appendw(b, ' ');
  appends(b, s);
  if (x != 1) {
    appendw(b, 's');
  }
}
/**
* Generates process resource usage report.
*/
void AppendResourceReport(char **b, struct rusage *ru, const char *nl) {
char ibuf[27];
long utime, stime;
long double ticks;
if (ru->ru_maxrss) {
AppendMetric(b, "ballooned to ", ru->ru_maxrss, "kb in size", nl);
}
if ((utime = ru->ru_utime.tv_sec * 1000000 + ru->ru_utime.tv_usec) |
(stime = ru->ru_stime.tv_sec * 1000000 + ru->ru_stime.tv_usec)) {
appends(b, "needed ");
AppendInt(b, utime + stime);
appends(b, "us cpu (");
AppendInt(b, (long double)stime / (utime + stime) * 100);
appends(b, "% kernel)");
appends(b, nl);
ticks = ceill((long double)(utime + stime) / (1000000.L / CLK_TCK));
if (ru->ru_idrss) {
AppendMetric(b, "needed ", lroundl(ru->ru_idrss / ticks),
" memory on average", nl);
}
if (ru->ru_isrss) {
AppendMetric(b, "needed ", lroundl(ru->ru_isrss / ticks),
" stack on average", nl);
}
if (ru->ru_ixrss) {
AppendMetric(b, "needed ", lroundl(ru->ru_ixrss / ticks),
" shared on average", nl);
}
}
if (ru->ru_minflt || ru->ru_majflt) {
appends(b, "caused ");
AppendInt(b, ru->ru_minflt + ru->ru_majflt);
appends(b, " page faults (");
AppendInt(
b, (long double)ru->ru_minflt / (ru->ru_minflt + ru->ru_majflt) * 100);
appends(b, "% memcpy)");
appends(b, nl);
}
if (ru->ru_nvcsw + ru->ru_nivcsw > 1) {
AppendInt(b, ru->ru_nvcsw + ru->ru_nivcsw);
appends(b, " context switch (");
AppendInt(b,
(long double)ru->ru_nvcsw / (ru->ru_nvcsw + ru->ru_nivcsw) * 100);
appends(b, "% consensual)");
appends(b, nl);
}
if (ru->ru_msgrcv || ru->ru_msgsnd) {
appends(b, "received ");
AppendUnit(b, ru->ru_msgrcv, "message");
appends(b, " and sent ");
AppendInt(b, ru->ru_msgsnd);
appends(b, nl);
}
if (ru->ru_inblock || ru->ru_oublock) {
appends(b, "performed ");
AppendUnit(b, ru->ru_inblock, "read");
appends(b, " and ");
AppendInt(b, ru->ru_oublock);
appends(b, " write i/o operations");
appends(b, nl);
}
if (ru->ru_nsignals) {
appends(b, "received ");
AppendUnit(b, ru->ru_nsignals, "signal");
appends(b, nl);
}
if (ru->ru_nswap) {
appends(b, "got swapped ");
AppendUnit(b, ru->ru_nswap, "time");
appends(b, nl);
}
}
|
calvinkim27/test0620 | midauth/web/user.py | # -*- coding: utf-8 -*-
import datetime
import urllib
import urllib2
import json
from flask import Blueprint
from flask import request
from flask import render_template, redirect, url_for, abort
import flask.ext.login
from flask.ext.login import login_required, current_user
from flask.ext.oauthlib.client import OAuthException
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.inspection import inspect
import formencode.schema
import formencode.validators as _v
from formencode.compound import Pipe
from formencode.variabledecode import variable_decode
from dateutil.tz import tzutc
from midauth.models.user import User, UserStatus, Email, _LOGIN_NAME_RE
from midauth.models.cred import GoogleOAuth2
from .application import login_manager, oauth_client, get_session, respond
from .dispatch import resource_url
blueprint = Blueprint('user', __name__)

# OAuth2 client used for "Log in with Google".  The requested scopes
# cover the basic profile plus the account's e-mail address.
google_oauth2 = oauth_client.remote_app(
    'google_oauth2',
    request_token_params={'scope': [
        u'https://www.googleapis.com/auth/userinfo.profile',
        u'https://www.googleapis.com/auth/userinfo.email',
    ]},
    base_url=u'https://www.googleapis.com/',
    request_token_url=None,
    access_token_method='POST',
    access_token_url=u'https://accounts.google.com/o/oauth2/token',
    authorize_url=u'https://accounts.google.com/o/oauth2/auth',
    # for lazy loading
    # TODO: apply https://github.com/lepture/flask-oauthlib/issues/23
    consumer_key=None,
    consumer_secret=None,
)
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: look a ``User`` up by primary key."""
    s = get_session()
    return s.query(User).get(user_id)
@blueprint.route('', endpoint='list')
def list_():
    """Render the listing page of all active users."""
    session = get_session()
    active_users = session.query(User).filter(User.active)
    return render_template('user/list.html', users=active_users)
@blueprint.route('/<user>')
def get(user):
    """Fetch a single user's information.

    **Example response**:

    .. code-block:: http

       HTTP/1.1 200 OK
       Vary: Accept
       Content-Type: application/json

       {
           "id": "19b76b9c3b924a9bb3cc482732f019e4",
           "uid": "kroisse",
           "name": "유은총",
           "nick": "가제트",
           "groups": [
               "http://midauth-sample.smartstudy.co.kr/groups/devops",
               "http://midauth-sample.smartstudy.co.kr/groups/developers",
               "http://midauth-sample.smartstudy.co.kr/groups/task-force",
           ],
           "emails": [
               "<EMAIL>",
               "<EMAIL>",
           ]
       }

    :param user: the user's login name
    :statuscode 200: successful request

        ====== ===
        key    value
        ====== ===
        id     unique, immutable
        login  the user's ID; must be usable inside a URI; unique, mutable
        name   the user's real name, mutable
        nick   the user's nickname, mutable
        groups list of URIs of the groups the user belongs to
        emails list of the user's e-mail addresses
        ====== ===

    :statuscode 404: no user with that login name
    """
    s = get_session()
    try:
        user = s.query(User).filter_by(login=user).one()
    except NoResultFound:
        abort(404)
    return respond(user, 'user/get.html', picture_url=user.picture_url)
@blueprint.route('/login')
def login():
    """Show the login page; users who are already authenticated are
    redirected to their own resource page instead."""
    if not current_user.is_authenticated():
        return render_template('user/login.html',
                               next=request.values.get('next'))
    return redirect(resource_url(current_user))
@blueprint.route('/login', methods=['DELETE'])
@login_required
def logout():
    """End the current session, then redirect (default: login page)."""
    flask.ext.login.logout_user()
    return redirect(request.values.get('next', url_for('.login')))
class UserRegistrationSchema(formencode.schema.Schema):
    """Validates the sign-up form (``login`` and ``name`` fields).

    Maximum lengths are derived from the mapped column definitions via
    SQLAlchemy inspection, so the form can never accept a value the
    database column would truncate.
    """
    # Login must match the model's allowed pattern and be non-empty.
    login = Pipe(validators=[
        _v.Regex(_LOGIN_NAME_RE, strip=True),
        _v.MaxLength(inspect(User).c.login.type.length),
        _v.NotEmpty(messages={'empty': 'Please enter an login'}),
    ])
    # Real name: only length and presence are validated.
    name = Pipe(validators=[
        _v.MaxLength(inspect(User).c.name.type.length),
        _v.NotEmpty(messages={'empty': 'Please enter your real name'}),
    ])
@blueprint.route('/register', methods=['POST'])
def register():
    """Complete the sign-up of a provisional (unregistered) user.

    Expects ``user_id`` plus the :class:`UserRegistrationSchema` fields
    in the POST body.  On validation failure the form is re-rendered
    with the errors; on success the user becomes active and is logged in.
    """
    form = variable_decode(request.form)
    s = get_session()
    try:
        user_id = form.pop('user_id')
    except KeyError:
        abort(400)
    user = s.query(User).get(user_id)
    next_url = form.pop('next', None)
    # Only accounts still in the "unregistered" state may be completed.
    if user is None or user.status != UserStatus.unregistered:
        abort(400)
    try:
        data = UserRegistrationSchema().to_python(form)
    except formencode.Invalid as e:
        return render_template('user/register.html',
                               formdata=request.form, formerror=e.error_dict)
    user.status = UserStatus.active
    user.login = data['login']
    user.name = data['name']
    # Creation time is the registration time, not the OAuth first contact.
    user.created_at = datetime.datetime.now(tzutc())
    s.commit()
    flask.ext.login.login_user(user)
    if not next_url:
        next_url = resource_url(user)
    return redirect(next_url)
def register_form(user):
    """Render the sign-up form, prefilled from the provisional ``user``."""
    if not isinstance(user, User):
        raise TypeError('user should be a {0!r}, not {1!r}'.format(User, user))
    initial = {
        'login': user.login,
        'name': user.name,
        'next': request.values.get('next', ''),
        'user_id': user.pk,
    }
    return render_template('user/register.html', formdata=initial)
@blueprint.route('/auth/google')
def auth_google():
    """Start the Google OAuth2 flow; Google redirects back to
    :func:`auth_google_after`."""
    return google_oauth2.authorize(callback=url_for('.auth_google_after',
                                                    _external=True))
@blueprint.route('/auth/google/callback')
@google_oauth2.authorized_handler
def auth_google_after(resp):
    """OAuth2 callback from Google: sign the account up or log it in.

    ``resp`` is the token response from flask-oauthlib; ``None`` means
    the user denied access, an :class:`OAuthException` means the token
    exchange failed and is retried from the start.
    """
    if resp is None:
        return 'Access denied: ' \
               'reason={0.error_reason} error={0.error_description}' \
               .format(request.args)
    elif isinstance(resp, OAuthException):
        return redirect(url_for('.auth_google'))
    # Google's stable account id lives in the id_token's user_id claim.
    id_token = decode_jwt(resp['id_token'])
    user_key = id_token['user_id']
    s = get_session()
    try:
        cred = s.query(GoogleOAuth2).filter_by(key=user_key).one()
    except NoResultFound:
        # Sign-up: first time we see this Google account.  Fetch the
        # profile and create a provisional (unregistered) user.
        token = (resp['access_token'], '')
        r = google_oauth2.get('oauth2/v3/userinfo', token=token)
        userinfo = r.data
        user = User(None, userinfo['name'], status=UserStatus.unregistered)
        user.emails.append(Email(userinfo['email'], verified=True))
        user.primary_email = userinfo['email']
        cred = GoogleOAuth2(user, resp, user_key=user_key)
        s.add(user)
        s.add(cred)
        s.commit()
    if cred.user.status == UserStatus.unregistered:
        # Registration was started but never finished: show the form again.
        return register_form(cred.user)
    else:
        # Log-in: known account; refresh the stored token first.
        cred.token = resp
        s.commit()
        flask.ext.login.login_user(cred.user)
        next_url = request.values.get('next', resource_url(cred.user))
        return redirect(next_url)
def decode_jwt(token):
    """Decode the given JWT by asking Google's tokeninfo endpoint.

    The token is validated remotely rather than by checking the
    signature locally.  NOTE(review): on Python 2 < 2.7.9, ``urllib2``
    does not verify HTTPS certificates — confirm the runtime before
    trusting this channel.

    Fix: the HTTP response object is now closed deterministically
    instead of being leaked until garbage collection.

    .. seealso:: `Validating an ID Token <https://developers.google.com/accounts/docs/OAuth2Login?hl=ko#validatinganidtoken>`_
    """
    r = urllib2.urlopen('https://www.googleapis.com/oauth2/v1/tokeninfo',
                        data=urllib.urlencode({'id_token': token}))
    try:
        return json.load(r)
    finally:
        r.close()
|
comroid-git/guardian-framework | src/restless/main/java/org/comroid/restless/endpoint/QueryParameter.java | <filename>src/restless/main/java/org/comroid/restless/endpoint/QueryParameter.java
package org.comroid.restless.endpoint;
import org.intellij.lang.annotations.Language;
/**
 * Static helpers for building URL query-parameter fragments and the
 * regular expressions that match them.
 */
public final class QueryParameter {
    private QueryParameter() {
        throw new AbstractMethodError();
    }

    /** Regex for an optional first ("?"-prefixed) query parameter. */
    @Language("RegExp")
    public static String regex(@Language("RegExp") String valueRegex) {
        return regex(true, valueRegex);
    }

    /**
     * Regex for an optional query parameter whose value matches
     * {@code valueRegex}; prefixed with "?" when first, "&" otherwise.
     */
    @Language("RegExp")
    public static String regex(boolean first, @Language("RegExp") String valueRegex) {
        String prefix = first ? "\\?" : "&";
        return "(" + prefix + "[\\w\\S]+?=" + valueRegex + ")?";
    }

    /** Literal "?name=value" fragment. */
    public static String param(String name, Object value) {
        return param(true, name, value);
    }

    /** Literal "name=value" fragment, "?"- or "&"-prefixed. */
    public static String param(boolean first, String name, Object value) {
        char prefix = first ? '?' : '&';
        return prefix + name + "=" + value;
    }
}
|
amishasdf12/gsocStrace | syscall.c | /*
* Copyright (c) 1991, 1992 <NAME> <<EMAIL>>
* Copyright (c) 1993 <NAME> <<EMAIL>>
* Copyright (c) 1993, 1994, 1995, 1996 <NAME> <<EMAIL>>
* Copyright (c) 1996-1999 <NAME> <<EMAIL>>
* Copyright (c) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
* Linux for s390 port by <NAME>
* <<EMAIL>,<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "defs.h"
#include <sys/param.h>
/* for struct iovec */
#include <sys/uio.h>
#include "regs.h"
#include "ptrace.h"
#if defined(SPARC64)
# undef PTRACE_GETREGS
# define PTRACE_GETREGS PTRACE_GETREGS64
# undef PTRACE_SETREGS
# define PTRACE_SETREGS PTRACE_SETREGS64
#endif
#if defined SPARC64
# include <asm/psrcompat.h>
#elif defined SPARC
# include <asm/psr.h>
#endif
#ifdef IA64
# include <asm/rse.h>
#endif
#ifndef NT_PRSTATUS
# define NT_PRSTATUS 1
#endif
#ifndef NSIG
# warning: NSIG is not defined, using 32
# define NSIG 32
#endif
#include "syscall.h"
/* Define these shorthand notations to simplify the syscallent files. */
#define TD TRACE_DESC
#define TF TRACE_FILE
#define TI TRACE_IPC
#define TN TRACE_NETWORK
#define TP TRACE_PROCESS
#define TS TRACE_SIGNAL
#define TM TRACE_MEMORY
#define NF SYSCALL_NEVER_FAILS
#define MA MAX_ARGS
#define SI STACKTRACE_INVALIDATE_CACHE
#define SE STACKTRACE_CAPTURE_ON_ENTER
const struct_sysent sysent0[] = {
#include "syscallent.h"
};
#if SUPPORTED_PERSONALITIES > 1
static const struct_sysent sysent1[] = {
# include "syscallent1.h"
};
#endif
#if SUPPORTED_PERSONALITIES > 2
static const struct_sysent sysent2[] = {
# include "syscallent2.h"
};
#endif
/* Now undef them since short defines cause wicked namespace pollution. */
#undef TD
#undef TF
#undef TI
#undef TN
#undef TP
#undef TS
#undef TM
#undef NF
#undef MA
#undef SI
#undef SE
/*
* `ioctlent[012].h' files are automatically generated by the auxiliary
* program `ioctlsort', such that the list is sorted by the `code' field.
* This has the side-effect of resolving the _IO.. macros into
* plain integers, eliminating the need to include here everything
* in "/usr/include".
*/
const char *const errnoent0[] = {
#include "errnoent.h"
};
const char *const signalent0[] = {
#include "signalent.h"
};
const struct_ioctlent ioctlent0[] = {
#include "ioctlent0.h"
};
#if SUPPORTED_PERSONALITIES > 1
static const char *const errnoent1[] = {
# include "errnoent1.h"
};
static const char *const signalent1[] = {
# include "signalent1.h"
};
static const struct_ioctlent ioctlent1[] = {
# include "ioctlent1.h"
};
#endif
#if SUPPORTED_PERSONALITIES > 2
static const char *const errnoent2[] = {
# include "errnoent2.h"
};
static const char *const signalent2[] = {
# include "signalent2.h"
};
static const struct_ioctlent ioctlent2[] = {
# include "ioctlent2.h"
};
#endif
enum {
nsyscalls0 = ARRAY_SIZE(sysent0)
#if SUPPORTED_PERSONALITIES > 1
, nsyscalls1 = ARRAY_SIZE(sysent1)
# if SUPPORTED_PERSONALITIES > 2
, nsyscalls2 = ARRAY_SIZE(sysent2)
# endif
#endif
};
enum {
nerrnos0 = ARRAY_SIZE(errnoent0)
#if SUPPORTED_PERSONALITIES > 1
, nerrnos1 = ARRAY_SIZE(errnoent1)
# if SUPPORTED_PERSONALITIES > 2
, nerrnos2 = ARRAY_SIZE(errnoent2)
# endif
#endif
};
enum {
nsignals0 = ARRAY_SIZE(signalent0)
#if SUPPORTED_PERSONALITIES > 1
, nsignals1 = ARRAY_SIZE(signalent1)
# if SUPPORTED_PERSONALITIES > 2
, nsignals2 = ARRAY_SIZE(signalent2)
# endif
#endif
};
enum {
nioctlents0 = ARRAY_SIZE(ioctlent0)
#if SUPPORTED_PERSONALITIES > 1
, nioctlents1 = ARRAY_SIZE(ioctlent1)
# if SUPPORTED_PERSONALITIES > 2
, nioctlents2 = ARRAY_SIZE(ioctlent2)
# endif
#endif
};
#if SUPPORTED_PERSONALITIES > 1
const struct_sysent *sysent = sysent0;
const char *const *errnoent = errnoent0;
const char *const *signalent = signalent0;
const struct_ioctlent *ioctlent = ioctlent0;
#endif
unsigned nsyscalls = nsyscalls0;
unsigned nerrnos = nerrnos0;
unsigned nsignals = nsignals0;
unsigned nioctlents = nioctlents0;
unsigned num_quals;
qualbits_t *qual_vec[SUPPORTED_PERSONALITIES];
static const unsigned nsyscall_vec[SUPPORTED_PERSONALITIES] = {
nsyscalls0,
#if SUPPORTED_PERSONALITIES > 1
nsyscalls1,
#endif
#if SUPPORTED_PERSONALITIES > 2
nsyscalls2,
#endif
};
static const struct_sysent *const sysent_vec[SUPPORTED_PERSONALITIES] = {
sysent0,
#if SUPPORTED_PERSONALITIES > 1
sysent1,
#endif
#if SUPPORTED_PERSONALITIES > 2
sysent2,
#endif
};
enum {
MAX_NSYSCALLS1 = (nsyscalls0
#if SUPPORTED_PERSONALITIES > 1
> nsyscalls1 ? nsyscalls0 : nsyscalls1
#endif
),
MAX_NSYSCALLS2 = (MAX_NSYSCALLS1
#if SUPPORTED_PERSONALITIES > 2
> nsyscalls2 ? MAX_NSYSCALLS1 : nsyscalls2
#endif
),
MAX_NSYSCALLS = MAX_NSYSCALLS2,
/* We are ready for arches with up to 255 signals,
* even though the largest known signo is on MIPS and it is 128.
* The number of existing syscalls on all arches is
* larger that 255 anyway, so it is just a pedantic matter.
*/
MIN_QUALS = MAX_NSYSCALLS > 255 ? MAX_NSYSCALLS : 255
};
#if SUPPORTED_PERSONALITIES > 1
unsigned current_personality;
# ifndef current_wordsize
unsigned current_wordsize;
static const int personality_wordsize[SUPPORTED_PERSONALITIES] = {
PERSONALITY0_WORDSIZE,
PERSONALITY1_WORDSIZE,
# if SUPPORTED_PERSONALITIES > 2
PERSONALITY2_WORDSIZE,
# endif
};
# endif
/*
 * Installs the syscall, errno, ioctl and signal tables that belong to
 * the given personality (tracee ABI flavor) into the globals used by
 * the rest of strace, then records it in current_personality (and, on
 * arches where it varies, current_wordsize).
 */
void
set_personality(int personality)
{
	nsyscalls = nsyscall_vec[personality];
	sysent = sysent_vec[personality];
	switch (personality) {
	case 0:
		errnoent = errnoent0;
		nerrnos = nerrnos0;
		ioctlent = ioctlent0;
		nioctlents = nioctlents0;
		signalent = signalent0;
		nsignals = nsignals0;
		break;
	case 1:
		errnoent = errnoent1;
		nerrnos = nerrnos1;
		ioctlent = ioctlent1;
		nioctlents = nioctlents1;
		signalent = signalent1;
		nsignals = nsignals1;
		break;
# if SUPPORTED_PERSONALITIES > 2
	case 2:
		errnoent = errnoent2;
		nerrnos = nerrnos2;
		ioctlent = ioctlent2;
		nioctlents = nioctlents2;
		signalent = signalent2;
		nsignals = nsignals2;
		break;
# endif
	}
	current_personality = personality;
# ifndef current_wordsize
	current_wordsize = personality_wordsize[personality];
# endif
}
/*
 * Like set_personality(), but driven by a tracee: switches tables only
 * when the personality actually changed, remembers it on the tcb, and
 * on biarch targets tells the user which mode the process runs in
 * (unless -q suppressed such messages).
 */
static void
update_personality(struct tcb *tcp, unsigned int personality)
{
	if (personality == current_personality)
		return;
	set_personality(personality);
	if (personality == tcp->currpers)
		return;
	tcp->currpers = personality;
# if defined(POWERPC64)
	if (!qflag) {
		static const char *const names[] = {"64 bit", "32 bit"};
		fprintf(stderr, "[ Process PID=%d runs in %s mode. ]\n",
			tcp->pid, names[personality]);
	}
# elif defined(X86_64)
	if (!qflag) {
		static const char *const names[] = {"64 bit", "32 bit", "x32"};
		fprintf(stderr, "[ Process PID=%d runs in %s mode. ]\n",
			tcp->pid, names[personality]);
	}
# elif defined(X32)
	if (!qflag) {
		static const char *const names[] = {"x32", "32 bit"};
		fprintf(stderr, "[ Process PID=%d runs in %s mode. ]\n",
			tcp->pid, names[personality]);
	}
# elif defined(AARCH64)
	if (!qflag) {
		static const char *const names[] = {"32-bit", "AArch64"};
		fprintf(stderr, "[ Process PID=%d runs in %s mode. ]\n",
			tcp->pid, names[personality]);
	}
# elif defined(TILE)
	if (!qflag) {
		static const char *const names[] = {"64-bit", "32-bit"};
		fprintf(stderr, "[ Process PID=%d runs in %s mode. ]\n",
			tcp->pid, names[personality]);
	}
# endif
}
static int qual_syscall(), qual_signal(), qual_desc();
static const struct qual_options {
unsigned int bitflag;
const char *option_name;
int (*qualify)(const char *, int, int);
const char *argument_name;
} qual_options[] = {
{ QUAL_TRACE, "trace", qual_syscall, "system call" },
{ QUAL_TRACE, "t", qual_syscall, "system call" },
{ QUAL_ABBREV, "abbrev", qual_syscall, "system call" },
{ QUAL_ABBREV, "a", qual_syscall, "system call" },
{ QUAL_VERBOSE, "verbose", qual_syscall, "system call" },
{ QUAL_VERBOSE, "v", qual_syscall, "system call" },
{ QUAL_RAW, "raw", qual_syscall, "system call" },
{ QUAL_RAW, "x", qual_syscall, "system call" },
{ QUAL_SIGNAL, "signal", qual_signal, "signal" },
{ QUAL_SIGNAL, "signals", qual_signal, "signal" },
{ QUAL_SIGNAL, "s", qual_signal, "signal" },
{ QUAL_READ, "read", qual_desc, "descriptor" },
{ QUAL_READ, "reads", qual_desc, "descriptor" },
{ QUAL_READ, "r", qual_desc, "descriptor" },
{ QUAL_WRITE, "write", qual_desc, "descriptor" },
{ QUAL_WRITE, "writes", qual_desc, "descriptor" },
{ QUAL_WRITE, "w", qual_desc, "descriptor" },
{ 0, NULL, NULL, NULL },
};
/*
 * Grow every personality's qualifier bitmap so it can hold n entries.
 * Newly added slots are zeroed; existing bits are preserved.
 * Dies (does not return) on allocation failure.
 */
static void
reallocate_qual(const unsigned int n)
{
	unsigned pers;

	for (pers = 0; pers < SUPPORTED_PERSONALITIES; pers++) {
		qualbits_t *vec = realloc(qual_vec[pers], n * sizeof(*vec));

		if (!vec)
			die_out_of_memory();
		/* Zero only the newly appended tail. */
		memset(vec + num_quals, 0,
		       (n - num_quals) * sizeof(*vec));
		qual_vec[pers] = vec;
	}
	num_quals = n;
}
/*
 * Set (or, with not != 0, clear) bitflag for syscall/signal/descriptor
 * number n.  pers selects a single personality; pers < 0 applies the
 * change to all personalities.  The bitmap is grown on demand.
 */
static void
qualify_one(const unsigned int n, unsigned int bitflag, const int not, const int pers)
{
	int i;

	if (num_quals <= n)
		reallocate_qual(n + 1);

	for (i = 0; i < SUPPORTED_PERSONALITIES; i++) {
		if (pers >= 0 && pers != i)
			continue;
		if (not)
			qual_vec[i][n] &= ~bitflag;
		else
			qual_vec[i][n] |= bitflag;
	}
}
/*
 * Qualify one syscall token: either a decimal syscall number (applied to
 * all personalities) or a syscall name (applied to every personality in
 * which that name exists).  Returns 0 on success, -1 if the token did not
 * match any syscall.
 */
static int
qual_syscall(const char *s, const unsigned int bitflag, const int not)
{
	int pers;
	unsigned int idx;
	int matched = 0;

	if (*s >= '0' && *s <= '9') {
		/* Numeric form; out-of-range (or unparsable) => error. */
		idx = string_to_uint(s);
		if (idx >= MAX_NSYSCALLS)
			return -1;
		qualify_one(idx, bitflag, not, -1);
		return 0;
	}

	/* Name form: the same name may exist in several personalities. */
	for (pers = 0; pers < SUPPORTED_PERSONALITIES; pers++) {
		for (idx = 0; idx < nsyscall_vec[pers]; idx++) {
			const char *name = sysent_vec[pers][idx].sys_name;

			if (name && strcmp(s, name) == 0) {
				qualify_one(idx, bitflag, not, pers);
				matched = 1;
			}
		}
	}
	return matched ? 0 : -1;
}
/*
 * Qualify one signal token: a decimal signal number (0..255) or a signal
 * name, with or without the "SIG" prefix (case-insensitive).
 * Returns 0 on success, -1 if the token matched nothing.
 */
static int
qual_signal(const char *s, const unsigned int bitflag, const int not)
{
	unsigned int i;

	if (*s >= '0' && *s <= '9') {
		const int signo = string_to_uint(s);

		if (signo < 0 || signo > 255)
			return -1;
		qualify_one(signo, bitflag, not, -1);
		return 0;
	}

	/* Accept both "KILL" and "SIGKILL". */
	if (strncasecmp(s, "SIG", 3) == 0)
		s += 3;

	for (i = 0; i <= NSIG; i++) {
		/* signame() yields "SIGxxx"; compare past its "SIG" prefix. */
		if (strcasecmp(signame(i) + 3, s) == 0) {
			qualify_one(i, bitflag, not, -1);
			return 0;
		}
	}
	return -1;
}
/*
 * Qualify one file-descriptor token.  Only decimal numbers are accepted;
 * the 0x7fff cap is pure paranoia against absurd descriptor values.
 * Returns 0 on success, -1 otherwise.
 */
static int
qual_desc(const char *s, const unsigned int bitflag, const int not)
{
	int fd;

	if (*s < '0' || *s > '9')
		return -1;

	fd = string_to_uint(s);
	if (fd < 0 || fd > 0x7fff) /* paranoia */
		return -1;

	qualify_one(fd, bitflag, not, -1);
	return 0;
}
/*
 * Map a syscall class name ("file", "network", ...) to its TRACE_* flag.
 * Returns the flag, or -1 if the name is not a known class.
 */
static int
lookup_class(const char *s)
{
	static const struct {
		const char *name;
		int flag;
	} classes[] = {
		{ "file",	TRACE_FILE	},
		{ "ipc",	TRACE_IPC	},
		{ "network",	TRACE_NETWORK	},
		{ "process",	TRACE_PROCESS	},
		{ "signal",	TRACE_SIGNAL	},
		{ "desc",	TRACE_DESC	},
		{ "memory",	TRACE_MEMORY	},
	};
	unsigned int i;

	for (i = 0; i < sizeof(classes) / sizeof(classes[0]); i++)
		if (strcmp(s, classes[i].name) == 0)
			return classes[i].flag;
	return -1;
}
/*
 * Parse one qualification string of the form "[option=][!]token[,token...]"
 * (e.g. "trace=open,close" or "signal=!SIGIO") and update the per-syscall /
 * per-signal / per-descriptor qualifier bitmaps accordingly.
 * Dies with an error message on an invalid token.
 */
void
qualify(const char *s)
{
	const struct qual_options *opt;
	char *copy;
	const char *p;
	int not;
	unsigned int i;

	if (num_quals == 0)
		reallocate_qual(MIN_QUALS);

	/* Find the "option=" prefix; default is the first entry ("trace"). */
	opt = &qual_options[0];
	for (i = 0; (p = qual_options[i].option_name); i++) {
		unsigned int len = strlen(p);
		if (strncmp(s, p, len) == 0 && s[len] == '=') {
			opt = &qual_options[i];
			s += len + 1;
			break;
		}
	}

	/* A leading '!' inverts the whole selection. */
	not = 0;
	if (*s == '!') {
		not = 1;
		s++;
	}
	/* "none" is just shorthand for "!all". */
	if (strcmp(s, "none") == 0) {
		not = 1 - not;
		s = "all";
	}
	if (strcmp(s, "all") == 0) {
		for (i = 0; i < num_quals; i++) {
			qualify_one(i, opt->bitflag, not, -1);
		}
		return;
	}

	/* Explicit list: first reset every entry to the complement, then
	   flip the listed tokens the requested way. */
	for (i = 0; i < num_quals; i++) {
		qualify_one(i, opt->bitflag, !not, -1);
	}
	copy = strdup(s);
	if (!copy)
		die_out_of_memory();
	for (p = strtok(copy, ","); p; p = strtok(NULL, ",")) {
		int n;
		/* Class names are only meaningful for the "trace" option. */
		if (opt->bitflag == QUAL_TRACE && (n = lookup_class(p)) > 0) {
			unsigned pers;
			for (pers = 0; pers < SUPPORTED_PERSONALITIES; pers++) {
				for (i = 0; i < nsyscall_vec[pers]; i++)
					if (sysent_vec[pers][i].sys_flags & n)
						qualify_one(i, opt->bitflag, not, pers);
			}
			continue;
		}
		if (opt->qualify(p, opt->bitflag, not)) {
			error_msg_and_die("invalid %s '%s'",
				opt->argument_name, p);
		}
	}
	free(copy);
	return;
}
#ifdef SYS_socket_subcall
/*
 * On architectures where all socket calls are multiplexed through a single
 * socketcall(2) syscall, rewrite tcp so it looks like the real subcall:
 * fix up scno/s_ent/qual_flg and fetch the subcall's argument vector from
 * tracee memory (u_arg[1] points at the packed argument array).
 * Silently returns if the subcall number is out of range.
 */
static void
decode_socket_subcall(struct tcb *tcp)
{
	unsigned long addr;
	unsigned int i, n, size;

	if (tcp->u_arg[0] < 0 || tcp->u_arg[0] >= SYS_socket_nsubcalls)
		return;

	tcp->scno = SYS_socket_subcall + tcp->u_arg[0];
	tcp->qual_flg = qual_flags[tcp->scno];
	tcp->s_ent = &sysent[tcp->scno];
	addr = tcp->u_arg[1];
	/* Each packed argument is one tracee word wide. */
	size = current_wordsize;
	n = tcp->s_ent->nargs;
	for (i = 0; i < n; ++i) {
		if (size == sizeof(int)) {
			unsigned int arg;
			/* On fetch failure fall back to 0 rather than abort. */
			if (umove(tcp, addr, &arg) < 0)
				arg = 0;
			tcp->u_arg[i] = arg;
		}
		else {
			unsigned long arg;
			if (umove(tcp, addr, &arg) < 0)
				arg = 0;
			tcp->u_arg[i] = arg;
		}
		addr += size;
	}
}
#endif
#ifdef SYS_ipc_subcall
/*
 * Counterpart of decode_socket_subcall() for the ipc(2) multiplexer:
 * fix up scno/s_ent/qual_flg for the real subcall and shift the argument
 * vector left by one (u_arg[0] was the subcall selector).
 * Silently returns if the subcall number is out of range.
 */
static void
decode_ipc_subcall(struct tcb *tcp)
{
	unsigned int i, n;

	if (tcp->u_arg[0] < 0 || tcp->u_arg[0] >= SYS_ipc_nsubcalls)
		return;

	tcp->scno = SYS_ipc_subcall + tcp->u_arg[0];
	tcp->qual_flg = qual_flags[tcp->scno];
	tcp->s_ent = &sysent[tcp->scno];
	n = tcp->s_ent->nargs;
	/* NOTE(review): reads u_arg[n] for the last slot — assumes the
	   register-fetch filled at least n+1 entries; verify vs MAX_ARGS. */
	for (i = 0; i < n; i++)
		tcp->u_arg[i] = tcp->u_arg[i + 1];
}
#endif
/*
 * Generic argument printer: on syscall entry, print every argument as a
 * hexadecimal value, comma-separated.  Always returns 0.
 */
int
printargs(struct tcb *tcp)
{
	int i, nargs;

	if (!entering(tcp))
		return 0;

	nargs = tcp->s_ent->nargs;
	for (i = 0; i < nargs; i++) {
		if (i)
			tprints(", ");
		tprintf("%#lx", tcp->u_arg[i]);
	}
	return 0;
}
/*
 * Generic argument printer: on syscall entry, print every argument as an
 * unsigned decimal value, comma-separated.  Always returns 0.
 */
int
printargs_lu(struct tcb *tcp)
{
	int i, nargs;

	if (!entering(tcp))
		return 0;

	nargs = tcp->s_ent->nargs;
	for (i = 0; i < nargs; i++) {
		if (i)
			tprints(", ");
		tprintf("%lu", tcp->u_arg[i]);
	}
	return 0;
}
/*
 * Generic argument printer: on syscall entry, print every argument as a
 * signed decimal value, comma-separated.  Always returns 0.
 */
int
printargs_ld(struct tcb *tcp)
{
	int i, nargs;

	if (!entering(tcp))
		return 0;

	nargs = tcp->s_ent->nargs;
	for (i = 0; i < nargs; i++) {
		if (i)
			tprints(", ");
		tprintf("%ld", tcp->u_arg[i]);
	}
	return 0;
}
#if defined(I386)
static struct user_regs_struct i386_regs;
long *const i386_esp_ptr = &i386_regs.esp;
# define ARCH_REGS_FOR_GETREGS i386_regs
#elif defined(X86_64) || defined(X32)
/*
* On i386, pt_regs and user_regs_struct are the same,
* but on 64 bit x86, user_regs_struct has six more fields:
* fs_base, gs_base, ds, es, fs, gs.
* PTRACE_GETREGS fills them too, so struct pt_regs would overflow.
*/
struct i386_user_regs_struct {
uint32_t ebx;
uint32_t ecx;
uint32_t edx;
uint32_t esi;
uint32_t edi;
uint32_t ebp;
uint32_t eax;
uint32_t xds;
uint32_t xes;
uint32_t xfs;
uint32_t xgs;
uint32_t orig_eax;
uint32_t eip;
uint32_t xcs;
uint32_t eflags;
uint32_t esp;
uint32_t xss;
};
static union {
struct user_regs_struct x86_64_r;
struct i386_user_regs_struct i386_r;
} x86_regs_union;
# define x86_64_regs x86_regs_union.x86_64_r
# define i386_regs x86_regs_union.i386_r
uint32_t *const i386_esp_ptr = &i386_regs.esp;
uint64_t *const x86_64_rsp_ptr = (uint64_t *) &x86_64_regs.rsp;
static struct iovec x86_io = {
.iov_base = &x86_regs_union
};
# define ARCH_REGS_FOR_GETREGSET x86_regs_union
# define ARCH_IOVEC_FOR_GETREGSET x86_io
#elif defined(IA64)
static struct pt_all_user_regs ia64_regs;
unsigned long *const ia64_frame_ptr = &ia64_regs.gr[12];
# define IA64_PSR_IS ((long)1 << 34)
# define ia64_ia32mode (ia64_regs.cr_ipsr & IA64_PSR_IS)
# define ARCH_REGS_FOR_GETREGS ia64_regs
#elif defined(POWERPC)
struct pt_regs ppc_regs; /* not static */
# define ARCH_REGS_FOR_GETREGS ppc_regs
#elif defined(M68K)
static long m68k_d0;
#elif defined(BFIN)
static long bfin_r0;
#elif defined(ARM)
static struct pt_regs arm_regs;
long *const arm_sp_ptr = &arm_regs.ARM_sp;
# define ARCH_REGS_FOR_GETREGS arm_regs
#elif defined(AARCH64)
struct arm_pt_regs {
int uregs[18];
};
# define ARM_cpsr uregs[16]
# define ARM_pc uregs[15]
# define ARM_lr uregs[14]
# define ARM_sp uregs[13]
# define ARM_ip uregs[12]
# define ARM_fp uregs[11]
# define ARM_r10 uregs[10]
# define ARM_r9 uregs[9]
# define ARM_r8 uregs[8]
# define ARM_r7 uregs[7]
# define ARM_r6 uregs[6]
# define ARM_r5 uregs[5]
# define ARM_r4 uregs[4]
# define ARM_r3 uregs[3]
# define ARM_r2 uregs[2]
# define ARM_r1 uregs[1]
# define ARM_r0 uregs[0]
# define ARM_ORIG_r0 uregs[17]
static union {
struct user_pt_regs aarch64_r;
struct arm_pt_regs arm_r;
} arm_regs_union;
# define aarch64_regs arm_regs_union.aarch64_r
# define arm_regs arm_regs_union.arm_r
uint64_t *const aarch64_sp_ptr = (uint64_t *) &aarch64_regs.sp;
uint32_t *const arm_sp_ptr = (uint32_t *) &arm_regs.ARM_sp;
static struct iovec aarch64_io = {
.iov_base = &arm_regs_union
};
# define ARCH_REGS_FOR_GETREGSET arm_regs_union
# define ARCH_IOVEC_FOR_GETREGSET aarch64_io
#elif defined(ALPHA)
static long alpha_r0;
static long alpha_a3;
#elif defined(AVR32)
static struct pt_regs avr32_regs;
# define ARCH_REGS_FOR_GETREGS avr32_regs
#elif defined(SPARC) || defined(SPARC64)
struct pt_regs sparc_regs; /* not static */
# define ARCH_REGS_FOR_GETREGS sparc_regs
#elif defined(MIPS)
struct mips_regs mips_regs; /* not static */
/* PTRACE_GETREGS on MIPS is available since linux v2.6.15. */
# define ARCH_REGS_FOR_GETREGS mips_regs
#elif defined(S390) || defined(S390X)
/* PTRACE_GETREGSET on S390 is available since linux v2.6.27. */
static struct user_regs_struct s390_regset;
unsigned long *const s390_frame_ptr = &s390_regset.gprs[15];
# define ARCH_REGS_FOR_GETREGSET s390_regset
#elif defined(HPPA)
static long hppa_r28;
#elif defined(SH)
static long sh_r0;
#elif defined(SH64)
static long sh64_r9;
#elif defined(CRISV10) || defined(CRISV32)
static long cris_r10;
#elif defined(TILE)
struct pt_regs tile_regs; /* not static */
# define ARCH_REGS_FOR_GETREGS tile_regs
#elif defined(MICROBLAZE)
static long microblaze_r3;
#elif defined(OR1K)
static struct user_regs_struct or1k_regs;
# define ARCH_REGS_FOR_GETREGSET or1k_regs
#elif defined(METAG)
static struct user_gp_regs metag_regs;
# define ARCH_REGS_FOR_GETREGSET metag_regs
#elif defined(XTENSA)
static long xtensa_a2;
# elif defined(ARC)
static struct user_regs_struct arc_regs;
# define ARCH_REGS_FOR_GETREGSET arc_regs
#endif
static long get_regs_error;
#if defined(SPARC) || defined(SPARC64) || defined(IA64) || defined(SH)
/*
 * Fetch the *second* return value register of the current syscall, on the
 * few architectures whose ABI returns a value pair (e.g. pipe()).
 * Returns -1 on fetch failure (SH only; the others read cached registers).
 */
long
getrval2(struct tcb *tcp)
{
	long val;

# if defined(SPARC) || defined(SPARC64)
	val = sparc_regs.u_regs[U_REG_O1];
# elif defined(SH)
	if (upeek(tcp->pid, 4*(REG_REG0+1), &val) < 0)
		return -1;
# elif defined(IA64)
	val = ia64_regs.gr[9];
# endif

	return val;
}
#endif
/*
 * Print the tracee's current program counter (for the -i option) in a
 * fixed-width bracketed hex format, falling back to a "[????...]" marker
 * when the registers could not be read.
 */
void
print_pc(struct tcb *tcp)
{
	const char *fmt;
	const char *bad;

/* Field width follows the tracee's word size, not strace's own. */
#ifdef current_wordsize
# define pc_wordsize current_wordsize
#else
# define pc_wordsize personality_wordsize[tcp->currpers]
#endif
	if (pc_wordsize == 4) {
		fmt = "[%08lx] ";
		bad = "[????????] ";
	} else {
		fmt = "[%016lx] ";
		bad = "[????????????????] ";
	}
#undef pc_wordsize

#define PRINTBADPC tprints(bad)

	if (get_regs_error) {
		PRINTBADPC;
		return;
	}

#if defined(I386)
	tprintf(fmt, i386_regs.eip);
#elif defined(X86_64) || defined(X32)
	/* iov_len distinguishes the 32-bit view from the 64-bit one. */
	if (x86_io.iov_len == sizeof(i386_regs))
		tprintf(fmt, (unsigned long) i386_regs.eip);
	else
		tprintf(fmt, (unsigned long) x86_64_regs.rip);
#elif defined(S390) || defined(S390X)
	tprintf(fmt, s390_regset.psw.addr);
#elif defined(IA64)
	tprintf(fmt, ia64_regs.br[0]);
#elif defined(POWERPC)
	tprintf(fmt, ppc_regs.nip);
#elif defined(M68K)
	long pc;
	if (upeek(tcp->pid, 4*PT_PC, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined(ALPHA)
	long pc;
	if (upeek(tcp->pid, REG_PC, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined(SPARC)
	tprintf(fmt, sparc_regs.pc);
#elif defined(SPARC64)
	tprintf(fmt, sparc_regs.tpc);
#elif defined(HPPA)
	long pc;
	if (upeek(tcp->pid, PT_IAOQ0, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined MIPS
	tprintf(fmt, (unsigned long) mips_REG_EPC);
#elif defined(SH)
	long pc;
	if (upeek(tcp->pid, 4*REG_PC, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined(SH64)
	long pc;
	if (upeek(tcp->pid, REG_PC, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined(AARCH64)
	if (aarch64_io.iov_len == sizeof(arm_regs))
		tprintf(fmt, (unsigned long) arm_regs.ARM_pc);
	else
		tprintf(fmt, (unsigned long) aarch64_regs.pc);
#elif defined(ARM)
	tprintf(fmt, arm_regs.ARM_pc);
#elif defined(AVR32)
	tprintf(fmt, avr32_regs.pc);
#elif defined(BFIN)
	long pc;
	if (upeek(tcp->pid, PT_PC, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined(CRISV10)
	long pc;
	if (upeek(tcp->pid, 4*PT_IRP, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined(CRISV32)
	long pc;
	if (upeek(tcp->pid, 4*PT_ERP, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined(TILE)
	tprintf(fmt, (unsigned long) tile_regs.pc);
#elif defined(OR1K)
	tprintf(fmt, or1k_regs.pc);
#elif defined(METAG)
	tprintf(fmt, metag_regs.pc);
#elif defined(XTENSA)
	long pc;
	if (upeek(tcp->pid, REG_PC, &pc) < 0) {
		PRINTBADPC;
		return;
	}
	tprintf(fmt, pc);
#elif defined(ARC)
	tprintf(fmt, arc_regs.efa);
#else
# warning print_pc is not implemented for this architecture
	PRINTBADPC;
#endif /* architecture */
}
/*
 * Shuffle syscall numbers so that we don't have huge gaps in syscall table.
 * The shuffling should be an involution: shuffle_scno(shuffle_scno(n)) == n.
 */
#if defined(ARM) || defined(AARCH64) /* So far only 32-bit ARM needs this */
static long
shuffle_scno(unsigned long scno)
{
	/* Ordinary syscalls pass through unchanged. */
	if (scno < ARM_FIRST_SHUFFLED_SYSCALL)
		return scno;

	/* __ARM_NR_cmpxchg? Swap with LAST_ORDINARY+1 */
	if (scno == ARM_FIRST_SHUFFLED_SYSCALL)
		return 0x000ffff0;
	if (scno == 0x000ffff0)
		return ARM_FIRST_SHUFFLED_SYSCALL;

#define ARM_SECOND_SHUFFLED_SYSCALL (ARM_FIRST_SHUFFLED_SYSCALL + 1)
	/*
	 * Is it ARM specific syscall?
	 * Swap [0x000f0000, 0x000f0000 + LAST_SPECIAL] range
	 * with [SECOND_SHUFFLED, SECOND_SHUFFLED + LAST_SPECIAL] range.
	 */
	if (scno >= 0x000f0000 &&
	    scno <= 0x000f0000 + ARM_LAST_SPECIAL_SYSCALL) {
		return scno - 0x000f0000 + ARM_SECOND_SHUFFLED_SYSCALL;
	}
	if (scno <= ARM_SECOND_SHUFFLED_SYSCALL + ARM_LAST_SPECIAL_SYSCALL) {
		return scno + 0x000f0000 - ARM_SECOND_SHUFFLED_SYSCALL;
	}

	return scno;
}
#else
/* All other architectures use the syscall number as-is. */
# define shuffle_scno(scno) ((long)(scno))
#endif
/*
 * Return a printable name ("syscall_NNN") for a syscall number that has no
 * entry in the syscall table.  The returned pointer refers to a static
 * buffer, so the result is only valid until the next call.
 */
static char*
undefined_scno_name(struct tcb *tcp)
{
	/* "syscall_" + up to 3 decimal digits per byte of long + NUL. */
	static char buf[sizeof("syscall_%lu") + sizeof(long)*3];

	/* snprintf instead of sprintf: the size arithmetic above is
	 * currently sufficient, but bound the write defensively. */
	snprintf(buf, sizeof(buf), "syscall_%lu", shuffle_scno(tcp->scno));
	return buf;
}
#ifdef POWERPC
/*
 * PTRACE_GETREGS was added to the PowerPC kernel in v2.6.23,
 * we provide a slow fallback for old kernels.
 */
/* Fills ppc_regs one register at a time via upeek(); returns 0 on success
 * or the first non-zero upeek() result on failure. */
static int powerpc_getregs_old(pid_t pid)
{
	int i;
	long r;

	if (iflag) {
		/* NIP is only needed when -i asks for instruction pointers. */
		r = upeek(pid, sizeof(long) * PT_NIP, (long *)&ppc_regs.nip);
		if (r)
			goto out;
	}
#ifdef POWERPC64 /* else we never use it */
	r = upeek(pid, sizeof(long) * PT_MSR, (long *)&ppc_regs.msr);
	if (r)
		goto out;
#endif
	r = upeek(pid, sizeof(long) * PT_CCR, (long *)&ppc_regs.ccr);
	if (r)
		goto out;
	r = upeek(pid, sizeof(long) * PT_ORIG_R3, (long *)&ppc_regs.orig_gpr3);
	if (r)
		goto out;
	/* gpr[0] is the syscall number, gpr[3..8] the arguments. */
	for (i = 0; i <= 8; i++) {
		r = upeek(pid, sizeof(long) * (PT_R0 + i),
			  (long *)&ppc_regs.gpr[i]);
		if (r)
			goto out;
	}
 out:
	return r;
}
#endif
/* Mark the cached register set as invalid (e.g. after the tracee is gone),
 * so print_pc() and friends print placeholders instead of stale values. */
void
clear_regs(void)
{
	get_regs_error = -1;
}
#if defined ARCH_REGS_FOR_GETREGSET
/*
 * Fetch the tracee's general-purpose registers with PTRACE_GETREGSET.
 * Returns the ptrace() result (0 on success, -1 with errno set on error).
 * On success the kernel updates iov_len to the amount actually written,
 * which some architectures (x86, AArch64) use to detect tracee bitness.
 */
static long
get_regset(pid_t pid)
{
# ifdef ARCH_IOVEC_FOR_GETREGSET
	/* variable iovec */
	ARCH_IOVEC_FOR_GETREGSET.iov_len = sizeof(ARCH_REGS_FOR_GETREGSET);
	return ptrace(PTRACE_GETREGSET, pid, NT_PRSTATUS,
		      &ARCH_IOVEC_FOR_GETREGSET);
# else
	/* constant iovec */
	static struct iovec io = {
		.iov_base = &ARCH_REGS_FOR_GETREGSET,
		.iov_len = sizeof(ARCH_REGS_FOR_GETREGSET)
	};
	return ptrace(PTRACE_GETREGSET, pid, NT_PRSTATUS, &io);
# endif
}
#endif /* ARCH_REGS_FOR_GETREGSET */
/*
 * Refresh the per-architecture register cache for this ptrace stop and
 * record success/failure in get_regs_error.  Chooses GETREGSET or GETREGS
 * depending on what the architecture block above defined, with runtime
 * fallbacks on x86-64 (old kernels without GETREGSET) and PowerPC
 * (pre-v2.6.23 kernels without GETREGS).
 */
void
get_regs(pid_t pid)
{
#ifdef ARCH_REGS_FOR_GETREGSET
# ifdef X86_64
	/* Try PTRACE_GETREGSET first, fallback to PTRACE_GETREGS. */
	static int getregset_support;
	/* 0 = undecided, 1 = GETREGSET works, -1 = use GETREGS. */

	if (getregset_support >= 0) {
		get_regs_error = get_regset(pid);
		if (getregset_support > 0)
			return;
		if (get_regs_error >= 0) {
			getregset_support = 1;
			return;
		}
		/* EPERM/ESRCH are per-process errors, not "unsupported". */
		if (errno == EPERM || errno == ESRCH)
			return;
		getregset_support = -1;
	}
	/* Use old method, with unreliable heuristical detection of 32-bitness. */
	x86_io.iov_len = sizeof(x86_64_regs);
	get_regs_error = ptrace(PTRACE_GETREGS, pid, NULL, &x86_64_regs);
	if (!get_regs_error && x86_64_regs.cs == 0x23) {
		/* cs == 0x23: compatibility (32-bit) mode segment. */
		x86_io.iov_len = sizeof(i386_regs);
		/*
		 * The order is important: i386_regs and x86_64_regs
		 * are overlaid in memory!
		 */
		i386_regs.ebx = x86_64_regs.rbx;
		i386_regs.ecx = x86_64_regs.rcx;
		i386_regs.edx = x86_64_regs.rdx;
		i386_regs.esi = x86_64_regs.rsi;
		i386_regs.edi = x86_64_regs.rdi;
		i386_regs.ebp = x86_64_regs.rbp;
		i386_regs.eax = x86_64_regs.rax;
		/*i386_regs.xds = x86_64_regs.ds; unused by strace */
		/*i386_regs.xes = x86_64_regs.es; ditto... */
		/*i386_regs.xfs = x86_64_regs.fs;*/
		/*i386_regs.xgs = x86_64_regs.gs;*/
		i386_regs.orig_eax = x86_64_regs.orig_rax;
		i386_regs.eip = x86_64_regs.rip;
		/*i386_regs.xcs = x86_64_regs.cs;*/
		/*i386_regs.eflags = x86_64_regs.eflags;*/
		i386_regs.esp = x86_64_regs.rsp;
		/*i386_regs.xss = x86_64_regs.ss;*/
	}
# else /* !X86_64 */
	/* Assume that PTRACE_GETREGSET works. */
	get_regs_error = get_regset(pid);
# endif
#elif defined ARCH_REGS_FOR_GETREGS
# if defined SPARC || defined SPARC64
	/* SPARC systems have the meaning of data and addr reversed */
	get_regs_error = ptrace(PTRACE_GETREGS, pid, (char *)&ARCH_REGS_FOR_GETREGS, 0);
# elif defined POWERPC
	static bool old_kernel = 0;
	if (old_kernel)
		goto old;
	get_regs_error = ptrace(PTRACE_GETREGS, pid, NULL, &ARCH_REGS_FOR_GETREGS);
	if (get_regs_error && errno == EIO) {
		/* EIO: request unknown to this (old) kernel; remember that. */
		old_kernel = 1;
 old:
		get_regs_error = powerpc_getregs_old(pid);
	}
# else
	/* Assume that PTRACE_GETREGS works. */
	get_regs_error = ptrace(PTRACE_GETREGS, pid, NULL, &ARCH_REGS_FOR_GETREGS);
# endif
#else /* !ARCH_REGS_FOR_GETREGSET && !ARCH_REGS_FOR_GETREGS */
# warning get_regs is not implemented for this architecture yet
	get_regs_error = 0;
#endif
}
/* Returns:
* 0: "ignore this ptrace stop", bail out of trace_syscall_entering() silently.
* 1: ok, continue in trace_syscall_entering().
* other: error, trace_syscall_entering() should print error indicator
* ("????" etc) and bail out.
*/
/*
 * Extract the syscall number from the cached registers, detect the
 * tracee's personality where the architecture supports several, and set
 * tcp->scno, tcp->s_ent and tcp->qual_flg accordingly.  Unknown numbers
 * get a catch-all sysent so they are still printed raw.
 * Return value contract is documented in the comment above this function.
 */
static int
get_scno(struct tcb *tcp)
{
	long scno = 0;

#if defined(S390) || defined(S390X)
	scno = s390_regset.gprs[2];
#elif defined(POWERPC)
	scno = ppc_regs.gpr[0];
# ifdef POWERPC64
	unsigned int currpers;

	/*
	 * Check for 64/32 bit mode.
	 * Embedded implementations covered by Book E extension of PPC use
	 * bit 0 (CM) of 32-bit Machine state register (MSR).
	 * Other implementations use bit 0 (SF) of 64-bit MSR.
	 */
	currpers = (ppc_regs.msr & 0x8000000080000000) ? 0 : 1;
	update_personality(tcp, currpers);
# endif
#elif defined(AVR32)
	scno = avr32_regs.r8;
#elif defined(BFIN)
	if (upeek(tcp->pid, PT_ORIG_P0, &scno))
		return -1;
#elif defined(I386)
	scno = i386_regs.orig_eax;
#elif defined(X86_64) || defined(X32)
# ifndef __X32_SYSCALL_BIT
#  define __X32_SYSCALL_BIT	0x40000000
# endif

	unsigned int currpers;
# if 1
	/* GETREGSET of NT_PRSTATUS tells us regset size,
	 * which unambiguously detects i386.
	 *
	 * Linux kernel distinguishes x86-64 and x32 processes
	 * solely by looking at __X32_SYSCALL_BIT:
	 * arch/x86/include/asm/compat.h::is_x32_task():
	 * if (task_pt_regs(current)->orig_ax & __X32_SYSCALL_BIT)
	 *	return true;
	 */
	if (x86_io.iov_len == sizeof(i386_regs)) {
		scno = i386_regs.orig_eax;
		currpers = 1;
	} else {
		scno = x86_64_regs.orig_rax;
		currpers = 0;
		if (scno & __X32_SYSCALL_BIT) {
			/*
			 * Syscall number -1 requires special treatment:
			 * it might be a side effect of SECCOMP_RET_ERRNO
			 * filtering that sets orig_rax to -1
			 * in some versions of linux kernel.
			 * If that is the case, then
			 * __X32_SYSCALL_BIT logic does not apply.
			 */
			if ((long long) x86_64_regs.orig_rax != -1) {
				scno -= __X32_SYSCALL_BIT;
				currpers = 2;
			} else {
#  ifdef X32
				currpers = 2;
#  endif
			}
		}
	}
# elif 0
	/* Historical alternative #1, disabled: detect via segment registers.
	 * cs = 0x33 for long mode (native 64 bit and x32)
	 * cs = 0x23 for compatibility mode (32 bit)
	 * ds = 0x2b for x32 mode (x86-64 in 32 bit)
	 */
	scno = x86_64_regs.orig_rax;
	switch (x86_64_regs.cs) {
		case 0x23: currpers = 1; break;
		case 0x33:
			if (x86_64_regs.ds == 0x2b) {
				currpers = 2;
				scno &= ~__X32_SYSCALL_BIT;
			} else
				currpers = 0;
			break;
		default:
			fprintf(stderr, "Unknown value CS=0x%08X while "
				 "detecting personality of process "
				 "PID=%d\n", (int)x86_64_regs.cs, tcp->pid);
			currpers = current_personality;
			break;
	}
# elif 0
	/* This version analyzes the opcode of a syscall instruction.
	 * (int 0x80 on i386 vs. syscall on x86-64)
	 * It works, but is too complicated, and strictly speaking, unreliable.
	 */
	unsigned long call, rip = x86_64_regs.rip;
	/* sizeof(syscall) == sizeof(int 0x80) == 2 */
	rip -= 2;
	errno = 0;
	call = ptrace(PTRACE_PEEKTEXT, tcp->pid, (char *)rip, (char *)0);
	if (errno)
		fprintf(stderr, "ptrace_peektext failed: %s\n",
				strerror(errno));
	switch (call & 0xffff) {
		/* x86-64: syscall = 0x0f 0x05 */
		case 0x050f: currpers = 0; break;
		/* i386: int 0x80 = 0xcd 0x80 */
		case 0x80cd: currpers = 1; break;
		default:
			currpers = current_personality;
			fprintf(stderr,
				"Unknown syscall opcode (0x%04X) while "
				"detecting personality of process "
				"PID=%d\n", (int)call, tcp->pid);
			break;
	}
# endif

# ifdef X32
	/* If we are built for a x32 system, then personality 0 is x32
	 * (not x86_64), and stracing of x86_64 apps is not supported.
	 * Stracing of i386 apps is still supported.
	 */
	if (currpers == 0) {
		fprintf(stderr, "syscall_%lu(...) in unsupported "
					"64-bit mode of process PID=%d\n",
			scno, tcp->pid);
		return 0;
	}
	currpers &= ~2; /* map 2,1 to 0,1 */
# endif
	update_personality(tcp, currpers);
#elif defined(IA64)
	if (ia64_ia32mode) {
		scno = ia64_regs.gr[0];
	} else {
		scno = ia64_regs.gr[15];
	}
#elif defined(AARCH64)
	/* iov_len after GETREGSET says which union member the kernel filled. */
	switch (aarch64_io.iov_len) {
		case sizeof(aarch64_regs):
			/* We are in 64-bit mode */
			scno = aarch64_regs.regs[8];
			update_personality(tcp, 1);
			break;
		case sizeof(arm_regs):
			/* We are in 32-bit mode */
			/* Note: we don't support OABI, unlike 32-bit ARM build */
			scno = arm_regs.ARM_r7;
			scno = shuffle_scno(scno);
			update_personality(tcp, 0);
			break;
	}
#elif defined(ARM)
	if (arm_regs.ARM_ip != 0) {
		/* It is not a syscall entry */
		fprintf(stderr, "pid %d stray syscall exit\n", tcp->pid);
		tcp->flags |= TCB_INSYSCALL;
		return 0;
	}
	/* Note: we support only 32-bit CPUs, not 26-bit */

# if !defined(__ARM_EABI__) || ENABLE_ARM_OABI
	if (arm_regs.ARM_cpsr & 0x20)
		/* Thumb mode */
		goto scno_in_r7;
	/* ARM mode */
	/* Check EABI/OABI by examining SVC insn's low 24 bits */
	errno = 0;
	scno = ptrace(PTRACE_PEEKTEXT, tcp->pid, (void *)(arm_regs.ARM_pc - 4), NULL);
	if (errno)
		return -1;
	/* EABI syscall convention? */
	if ((unsigned long) scno != 0xef000000) {
		/* No, it's OABI */
		if ((scno & 0x0ff00000) != 0x0f900000) {
			fprintf(stderr, "pid %d unknown syscall trap 0x%08lx\n",
				tcp->pid, scno);
			return -1;
		}
		/* Fixup the syscall number */
		scno &= 0x000fffff;
	} else {
 scno_in_r7:
		scno = arm_regs.ARM_r7;
	}
# else /* __ARM_EABI__ || !ENABLE_ARM_OABI */

	scno = arm_regs.ARM_r7;
# endif
	scno = shuffle_scno(scno);
#elif defined(M68K)
	if (upeek(tcp->pid, 4*PT_ORIG_D0, &scno) < 0)
		return -1;
#elif defined(MIPS)
	scno = mips_REG_V0;

	if (!SCNO_IN_RANGE(scno)) {
		/* A3 == 0 or -1 here would mean a stray syscall exit stop. */
		if (mips_REG_A3 == 0 || mips_REG_A3 == (uint64_t) -1) {
			if (debug_flag)
				fprintf(stderr, "stray syscall exit: v0 = %ld\n", scno);
			return 0;
		}
	}
#elif defined(ALPHA)
	if (upeek(tcp->pid, REG_A3, &alpha_a3) < 0)
		return -1;
	if (upeek(tcp->pid, REG_R0, &scno) < 0)
		return -1;

	/*
	 * Do some sanity checks to figure out if it's
	 * really a syscall entry
	 */
	if (!SCNO_IN_RANGE(scno)) {
		if (alpha_a3 == 0 || alpha_a3 == -1) {
			if (debug_flag)
				fprintf(stderr, "stray syscall exit: r0 = %ld\n", scno);
			return 0;
		}
	}
#elif defined(SPARC) || defined(SPARC64)
	/* Disassemble the syscall trap. */
	/* Retrieve the syscall trap instruction. */
	unsigned long trap;
	errno = 0;
# if defined(SPARC64)
	trap = ptrace(PTRACE_PEEKTEXT, tcp->pid, (char *)sparc_regs.tpc, 0);
	trap >>= 32;
# else
	trap = ptrace(PTRACE_PEEKTEXT, tcp->pid, (char *)sparc_regs.pc, 0);
# endif
	if (errno)
		return -1;

	/* Disassemble the trap to see what personality to use. */
	switch (trap) {
	case 0x91d02010:
		/* Linux/SPARC syscall trap. */
		update_personality(tcp, 0);
		break;
	case 0x91d0206d:
		/* Linux/SPARC64 syscall trap. */
		update_personality(tcp, 2);
		break;
	case 0x91d02000:
		/* SunOS syscall trap. (pers 1) */
		fprintf(stderr, "syscall: SunOS no support\n");
		return -1;
	case 0x91d02008:
		/* Solaris 2.x syscall trap. (per 2) */
		update_personality(tcp, 1);
		break;
	case 0x91d02009:
		/* NetBSD/FreeBSD syscall trap. */
		fprintf(stderr, "syscall: NetBSD/FreeBSD not supported\n");
		return -1;
	case 0x91d02027:
		/* Solaris 2.x gettimeofday */
		update_personality(tcp, 1);
		break;
	default:
# if defined(SPARC64)
		fprintf(stderr, "syscall: unknown syscall trap %08lx %016lx\n", trap, sparc_regs.tpc);
# else
		fprintf(stderr, "syscall: unknown syscall trap %08lx %08lx\n", trap, sparc_regs.pc);
# endif
		return -1;
	}

	/* Extract the system call number from the registers. */
	if (trap == 0x91d02027)
		scno = 156;
	else
		scno = sparc_regs.u_regs[U_REG_G1];
	if (scno == 0) {
		/* Indirect syscall: real number in %o0, args shifted down. */
		scno = sparc_regs.u_regs[U_REG_O0];
		memmove(&sparc_regs.u_regs[U_REG_O0], &sparc_regs.u_regs[U_REG_O1], 7*sizeof(sparc_regs.u_regs[0]));
	}
#elif defined(HPPA)
	if (upeek(tcp->pid, PT_GR20, &scno) < 0)
		return -1;
#elif defined(SH)
	/*
	 * In the new syscall ABI, the system call number is in R3.
	 */
	if (upeek(tcp->pid, 4*(REG_REG0+3), &scno) < 0)
		return -1;

	if (scno < 0) {
		/* Odd as it may seem, a glibc bug has been known to cause
		   glibc to issue bogus negative syscall numbers.  So for
		   our purposes, make strace print what it *should* have been */
		long correct_scno = (scno & 0xff);
		if (debug_flag)
			fprintf(stderr,
				"Detected glibc bug: bogus system call"
				" number = %ld, correcting to %ld\n",
				scno,
				correct_scno);
		scno = correct_scno;
	}
#elif defined(SH64)
	if (upeek(tcp->pid, REG_SYSCALL, &scno) < 0)
		return -1;
	scno &= 0xFFFF;
#elif defined(CRISV10) || defined(CRISV32)
	if (upeek(tcp->pid, 4*PT_R9, &scno) < 0)
		return -1;
#elif defined(TILE)
	unsigned int currpers;
	scno = tile_regs.regs[10];
# ifdef __tilepro__
	currpers = 1;
# else
#  ifndef PT_FLAGS_COMPAT
#   define PT_FLAGS_COMPAT 0x10000  /* from Linux 3.8 on */
#  endif
	if (tile_regs.flags & PT_FLAGS_COMPAT)
		currpers = 1;
	else
		currpers = 0;
# endif
	update_personality(tcp, currpers);
#elif defined(MICROBLAZE)
	if (upeek(tcp->pid, 0, &scno) < 0)
		return -1;
#elif defined(OR1K)
	scno = or1k_regs.gpr[11];
#elif defined(METAG)
	scno = metag_regs.dx[0][1];	/* syscall number in D1Re0 (D1.0) */
#elif defined(XTENSA)
	if (upeek(tcp->pid, SYSCALL_NR, &scno) < 0)
		return -1;
# elif defined(ARC)
	scno = arc_regs.scratch.r8;
#endif

	tcp->scno = scno;
	if (SCNO_IS_VALID(tcp->scno)) {
		tcp->s_ent = &sysent[scno];
		tcp->qual_flg = qual_flags[scno];
	} else {
		/* Catch-all entry so unknown syscalls still print raw args. */
		static const struct_sysent unknown = {
			.nargs = MAX_ARGS,
			.sys_flags = 0,
			.sys_func = printargs,
			.sys_name = "unknown", /* not used */
		};
		tcp->s_ent = &unknown;
		tcp->qual_flg = UNDEFINED_SCNO | QUAL_RAW | DEFAULT_QUAL_FLAGS;
	}
	return 1;
}
/*
 * Cannot rely on __kernel_[u]long_t being defined,
 * it is quite a recent feature of <asm/posix_types.h>.
 */
#ifdef __kernel_long_t
typedef __kernel_long_t kernel_long_t;
typedef __kernel_ulong_t kernel_ulong_t;
#else
# ifdef X32
/* x32: userspace long is 32-bit but the kernel ABI long is 64-bit. */
typedef long long kernel_long_t;
typedef unsigned long long kernel_ulong_t;
# else
typedef long kernel_long_t;
typedef unsigned long kernel_ulong_t;
# endif
#endif
/*
 * Check the syscall return value register value for whether it is
 * a negated errno code indicating an error, or a success return value.
 */
static inline bool
is_negated_errno(kernel_ulong_t val)
{
	/* Linux kernel defines MAX_ERRNO to 4095. */
	kernel_ulong_t max = -(kernel_long_t) 4095;

#if SUPPORTED_PERSONALITIES > 1 && SIZEOF_LONG > 4
	/* For a 32-bit tracee only the low 32 bits are meaningful. */
	if (current_wordsize < sizeof(val)) {
		val = (uint32_t) val;
		max = (uint32_t) max;
	}
#elif defined X32
	/*
	 * current_wordsize is 4 even in personality 0 (native X32)
	 * but truncation _must not_ be done in it.
	 * can't check current_wordsize here!
	 */
	if (current_personality != 0) {
		val = (uint32_t) val;
		max = (uint32_t) max;
	}
#endif

	return val >= max;
}
/* Return -1 on error or 1 on success (never 0!) */
/*
 * Copy the current syscall's arguments out of the cached registers (or via
 * upeek on architectures without a bulk register fetch) into tcp->u_arg[]
 * (and tcp->ext_arg[] on ABIs with 64-bit args in a 32-bit-long build).
 */
static int
get_syscall_args(struct tcb *tcp)
{
	int i, nargs;

	nargs = tcp->s_ent->nargs;

#if defined(S390) || defined(S390X)
	(void)i;
	(void)nargs;
	tcp->u_arg[0] = s390_regset.orig_gpr2;
	tcp->u_arg[1] = s390_regset.gprs[3];
	tcp->u_arg[2] = s390_regset.gprs[4];
	tcp->u_arg[3] = s390_regset.gprs[5];
	tcp->u_arg[4] = s390_regset.gprs[6];
	tcp->u_arg[5] = s390_regset.gprs[7];
#elif defined(ALPHA)
	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, REG_A0+i, &tcp->u_arg[i]) < 0)
			return -1;
#elif defined(IA64)
	if (!ia64_ia32mode) {
		/* Native mode: walk the register backing store for outN. */
		unsigned long *rbs_end =
			(unsigned long *) ia64_regs.ar[PT_AUR_BSP];
		unsigned long sof = (ia64_regs.cfm >> 0) & 0x7f;
		unsigned long sol = (ia64_regs.cfm >> 7) & 0x7f;
		unsigned long *out0 = ia64_rse_skip_regs(rbs_end, -sof + sol);

		for (i = 0; i < nargs; ++i) {
			if (umoven(tcp, (unsigned long) ia64_rse_skip_regs(out0, i),
				   sizeof(long), (char *) &tcp->u_arg[i]) < 0)
				return -1;
		}
	} else {
		(void)i;
		(void)nargs;
		/* truncate away IVE sign-extension */
		tcp->u_arg[0] = 0xffffffff & ia64_regs.gr[11]; /* EBX */
		tcp->u_arg[1] = 0xffffffff & ia64_regs.gr[ 9]; /* ECX */
		tcp->u_arg[2] = 0xffffffff & ia64_regs.gr[10]; /* EDX */
		tcp->u_arg[3] = 0xffffffff & ia64_regs.gr[14]; /* ESI */
		tcp->u_arg[4] = 0xffffffff & ia64_regs.gr[15]; /* EDI */
		tcp->u_arg[5] = 0xffffffff & ia64_regs.gr[13]; /* EBP */
	}
#elif defined LINUX_MIPSN64
	(void)i;
	(void)nargs;
	tcp->u_arg[0] = mips_REG_A0;
	tcp->u_arg[1] = mips_REG_A1;
	tcp->u_arg[2] = mips_REG_A2;
	tcp->u_arg[3] = mips_REG_A3;
	tcp->u_arg[4] = mips_REG_A4;
	tcp->u_arg[5] = mips_REG_A5;
#elif defined LINUX_MIPSN32
	(void)i;
	(void)nargs;
	/* ext_arg keeps the full 64-bit value for this 32-bit-long ABI. */
	tcp->u_arg[0] = tcp->ext_arg[0] = mips_REG_A0;
	tcp->u_arg[1] = tcp->ext_arg[1] = mips_REG_A1;
	tcp->u_arg[2] = tcp->ext_arg[2] = mips_REG_A2;
	tcp->u_arg[3] = tcp->ext_arg[3] = mips_REG_A3;
	tcp->u_arg[4] = tcp->ext_arg[4] = mips_REG_A4;
	tcp->u_arg[5] = tcp->ext_arg[5] = mips_REG_A5;
#elif defined LINUX_MIPSO32
	(void)i;
	(void)nargs;
	tcp->u_arg[0] = mips_REG_A0;
	tcp->u_arg[1] = mips_REG_A1;
	tcp->u_arg[2] = mips_REG_A2;
	tcp->u_arg[3] = mips_REG_A3;
	if (nargs > 4) {
		/* O32 passes arguments 5..8 on the stack. */
		umoven(tcp, mips_REG_SP + 4 * 4,
		       (nargs - 4) * sizeof(tcp->u_arg[0]),
		       (char *)(tcp->u_arg + 4));
	}
#elif defined(POWERPC)
	(void)i;
	(void)nargs;
	tcp->u_arg[0] = ppc_regs.orig_gpr3;
	tcp->u_arg[1] = ppc_regs.gpr[4];
	tcp->u_arg[2] = ppc_regs.gpr[5];
	tcp->u_arg[3] = ppc_regs.gpr[6];
	tcp->u_arg[4] = ppc_regs.gpr[7];
	tcp->u_arg[5] = ppc_regs.gpr[8];
#elif defined(SPARC) || defined(SPARC64)
	for (i = 0; i < nargs; ++i)
		tcp->u_arg[i] = sparc_regs.u_regs[U_REG_O0 + i];
#elif defined(HPPA)
	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, PT_GR26-4*i, &tcp->u_arg[i]) < 0)
			return -1;
#elif defined(ARM) || defined(AARCH64)
# if defined(AARCH64)
	if (tcp->currpers == 1)
		for (i = 0; i < nargs; ++i)
			tcp->u_arg[i] = aarch64_regs.regs[i];
	else
# endif
	for (i = 0; i < nargs; ++i)
		tcp->u_arg[i] = arm_regs.uregs[i];
#elif defined(AVR32)
	(void)i;
	(void)nargs;
	tcp->u_arg[0] = avr32_regs.r12;
	tcp->u_arg[1] = avr32_regs.r11;
	tcp->u_arg[2] = avr32_regs.r10;
	tcp->u_arg[3] = avr32_regs.r9;
	tcp->u_arg[4] = avr32_regs.r5;
	tcp->u_arg[5] = avr32_regs.r3;
#elif defined(BFIN)
	static const int argreg[MAX_ARGS] = { PT_R0, PT_R1, PT_R2, PT_R3, PT_R4, PT_R5 };

	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, argreg[i], &tcp->u_arg[i]) < 0)
			return -1;
#elif defined(SH)
	static const int syscall_regs[MAX_ARGS] = {
		4 * (REG_REG0+4), 4 * (REG_REG0+5), 4 * (REG_REG0+6),
		4 * (REG_REG0+7), 4 * (REG_REG0  ), 4 * (REG_REG0+1)
	};

	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, syscall_regs[i], &tcp->u_arg[i]) < 0)
			return -1;
#elif defined(SH64)
	int i;
	/* Registers used by SH5 Linux system calls for parameters */
	static const int syscall_regs[MAX_ARGS] = { 2, 3, 4, 5, 6, 7 };

	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, REG_GENERAL(syscall_regs[i]), &tcp->u_arg[i]) < 0)
			return -1;
#elif defined(I386)
	(void)i;
	(void)nargs;
	tcp->u_arg[0] = i386_regs.ebx;
	tcp->u_arg[1] = i386_regs.ecx;
	tcp->u_arg[2] = i386_regs.edx;
	tcp->u_arg[3] = i386_regs.esi;
	tcp->u_arg[4] = i386_regs.edi;
	tcp->u_arg[5] = i386_regs.ebp;
#elif defined(X86_64) || defined(X32)
	(void)i;
	(void)nargs;
	if (x86_io.iov_len != sizeof(i386_regs)) {
		/* x86-64 or x32 ABI */
		tcp->u_arg[0] = x86_64_regs.rdi;
		tcp->u_arg[1] = x86_64_regs.rsi;
		tcp->u_arg[2] = x86_64_regs.rdx;
		tcp->u_arg[3] = x86_64_regs.r10;
		tcp->u_arg[4] = x86_64_regs.r8;
		tcp->u_arg[5] = x86_64_regs.r9;
# ifdef X32
		tcp->ext_arg[0] = x86_64_regs.rdi;
		tcp->ext_arg[1] = x86_64_regs.rsi;
		tcp->ext_arg[2] = x86_64_regs.rdx;
		tcp->ext_arg[3] = x86_64_regs.r10;
		tcp->ext_arg[4] = x86_64_regs.r8;
		tcp->ext_arg[5] = x86_64_regs.r9;
# endif
	} else {
		/* i386 ABI */
		/* Zero-extend from 32 bits */
		/* Use widen_to_long(tcp->u_arg[N]) in syscall handlers
		 * if you need to use *sign-extended* parameter.
		 */
		tcp->u_arg[0] = (long)(uint32_t)i386_regs.ebx;
		tcp->u_arg[1] = (long)(uint32_t)i386_regs.ecx;
		tcp->u_arg[2] = (long)(uint32_t)i386_regs.edx;
		tcp->u_arg[3] = (long)(uint32_t)i386_regs.esi;
		tcp->u_arg[4] = (long)(uint32_t)i386_regs.edi;
		tcp->u_arg[5] = (long)(uint32_t)i386_regs.ebp;
	}
#elif defined(MICROBLAZE)
	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, (5 + i) * 4, &tcp->u_arg[i]) < 0)
			return -1;
#elif defined(CRISV10) || defined(CRISV32)
	static const int crisregs[MAX_ARGS] = {
		4*PT_ORIG_R10, 4*PT_R11, 4*PT_R12,
		4*PT_R13     , 4*PT_MOF, 4*PT_SRP
	};

	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, crisregs[i], &tcp->u_arg[i]) < 0)
			return -1;
#elif defined(TILE)
	for (i = 0; i < nargs; ++i)
		tcp->u_arg[i] = tile_regs.regs[i];
#elif defined(M68K)
	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, (i < 5 ? i : i + 2)*4, &tcp->u_arg[i]) < 0)
			return -1;
#elif defined(OR1K)
	(void)nargs;
	for (i = 0; i < 6; ++i)
		tcp->u_arg[i] = or1k_regs.gpr[3 + i];
#elif defined(METAG)
	for (i = 0; i < nargs; i++)
		/* arguments go backwards from D1Ar1 (D1.3) */
		tcp->u_arg[i] = ((unsigned long *)&metag_regs.dx[3][1])[-i];
#elif defined(XTENSA)
	/* arg0: a6, arg1: a3, arg2: a4, arg3: a5, arg4: a8, arg5: a9 */
	static const int xtensaregs[MAX_ARGS] = { 6, 3, 4, 5, 8, 9 };

	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, REG_A_BASE + xtensaregs[i], &tcp->u_arg[i]) < 0)
			return -1;
# elif defined(ARC)
	/* ARC argument registers are laid out in descending order. */
	long *arc_args = &arc_regs.scratch.r0;

	for (i = 0; i < nargs; ++i)
		tcp->u_arg[i] = *arc_args--;

#else /* Other architecture (32bits specific) */
	for (i = 0; i < nargs; ++i)
		if (upeek(tcp->pid, i*4, &tcp->u_arg[i]) < 0)
			return -1;
#endif
	return 1;
}
/* Handle the "entering" stop of a traced syscall: fetch the syscall
 * number and arguments, apply tracing/path filters, and print the
 * syscall name and opening of its argument list.
 * Returns 0 when the event is filtered out or suppressed, otherwise
 * the result of the syscall decoder (or the error indicator from the
 * register-fetch helpers).
 * NOTE(review): when jflag is set, output is emitted as JSON-style
 * "Function"/"Arguments" lines instead of the classic "name(" form.
 */
static int
trace_syscall_entering(struct tcb *tcp)
{
	int res, scno_good;

	/* scno_good remembers whether the syscall number itself was
	 * readable, independently of whether the args could be read. */
	scno_good = res = (get_regs_error ? -1 : get_scno(tcp));
	if (res == 0)
		return res;
	if (res == 1)
		res = get_syscall_args(tcp);

	if (res != 1) {
		/* Register fetch failed: print what we know of the name. */
		printleader(tcp);
		if (scno_good != 1)
			tprints("????" /* anti-trigraph gap */ "(");
		else if (tcp->qual_flg & UNDEFINED_SCNO)
			tprintf("%s(", undefined_scno_name(tcp));
		else
			tprintf("%s(", tcp->s_ent->sys_name);
		/*
		 * " <unavailable>" will be added later by the code which
		 * detects ptrace errors.
		 */
		goto ret;
	}

	/* Once the tracee reaches execve, stop hiding its log output
	 * (used by the "attach and wait for exec" startup mode). */
	if ( sys_execve == tcp->s_ent->sys_func
# if defined(SPARC) || defined(SPARC64)
	  || sys_execv == tcp->s_ent->sys_func
# endif
	) {
		hide_log_until_execve = 0;
	}

#if defined(SYS_socket_subcall) || defined(SYS_ipc_subcall)
	/* On architectures with multiplexed socketcall/ipc syscalls,
	 * replace the entry with the decoded subcall. The while(1) is
	 * only a break target, it never loops. */
	while (1) {
# ifdef SYS_socket_subcall
		if (tcp->s_ent->sys_func == sys_socketcall) {
			decode_socket_subcall(tcp);
			break;
		}
# endif
# ifdef SYS_ipc_subcall
		if (tcp->s_ent->sys_func == sys_ipc) {
			decode_ipc_subcall(tcp);
			break;
		}
# endif
		break;
	}
#endif

	/* Drop syscalls excluded by -e trace=... or by -P path filters. */
	if (!(tcp->qual_flg & QUAL_TRACE)
	 || (tracing_paths && !pathtrace_match(tcp))
	) {
		tcp->flags |= TCB_INSYSCALL | TCB_FILTERED;
		return 0;
	}

	tcp->flags &= ~TCB_FILTERED;

	/* Counting-only mode (-c) and pre-exec hiding suppress output
	 * but still need the INSYSCALL/time bookkeeping below. */
	if (cflag == CFLAG_ONLY_STATS || hide_log_until_execve) {
		res = 0;
		goto ret;
	}

#ifdef USE_LIBUNWIND
	if (stack_trace_enabled) {
		if (tcp->s_ent->sys_flags & STACKTRACE_CAPTURE_ON_ENTER)
			unwind_capture_stacktrace(tcp);
	}
#endif

	printleader(tcp);
	if (tcp->qual_flg & UNDEFINED_SCNO)
	{
		if(jflag==1)
			tprintf(" \"Function\" : %s\n", undefined_scno_name(tcp));
		else
			tprintf("%s(", undefined_scno_name(tcp));
	}
	else
	{
		if(jflag==1)
			tprintf(" \"Function\" : %s\n", tcp->s_ent->sys_name);
		else
			tprintf("%s(", tcp->s_ent->sys_name);
	}
	if(jflag==1)
		tprints(" \"Arguments\" : (");
	/* Raw mode prints untyped args; sys_exit is special-cased because
	 * its decoder must run even in raw mode. */
	if ((tcp->qual_flg & QUAL_RAW) && tcp->s_ent->sys_func != sys_exit)
	{
		res = printargs(tcp);
	}
	else
		res = tcp->s_ent->sys_func(tcp);

	fflush(tcp->outf);
 ret:
	tcp->flags |= TCB_INSYSCALL;
	/* Measure the entrance time as late as possible to avoid errors. */
	if (Tflag || cflag)
		gettimeofday(&tcp->etime, NULL);
	return res;
}
/* Fetch the register(s) holding the syscall return value for
 * architectures where get_regs() has not already fetched the whole
 * register set (those use per-register PTRACE_PEEKUSER via upeek()).
 * Returns:
 *  1: ok, continue in trace_syscall_exiting().
 * -1: error, trace_syscall_exiting() should print error indicator
 *    ("????" etc) and bail out.
 */
static int
get_syscall_result(struct tcb *tcp)
{
#if defined ARCH_REGS_FOR_GETREGSET || defined ARCH_REGS_FOR_GETREGS
	/* already done by get_regs */
#elif defined(BFIN)
	if (upeek(tcp->pid, PT_R0, &bfin_r0) < 0)
		return -1;
#elif defined(M68K)
	if (upeek(tcp->pid, 4*PT_D0, &m68k_d0) < 0)
		return -1;
#elif defined(ALPHA)
	/* Alpha reports errors in a3 and the value in r0: fetch both. */
	if (upeek(tcp->pid, REG_A3, &alpha_a3) < 0)
		return -1;
	if (upeek(tcp->pid, REG_R0, &alpha_r0) < 0)
		return -1;
#elif defined(HPPA)
	if (upeek(tcp->pid, PT_GR28, &hppa_r28) < 0)
		return -1;
#elif defined(SH)
	/* new syscall ABI returns result in R0 */
	if (upeek(tcp->pid, 4*REG_REG0, (long *)&sh_r0) < 0)
		return -1;
#elif defined(SH64)
	/* ABI defines result returned in r9 */
	if (upeek(tcp->pid, REG_GENERAL(9), (long *)&sh64_r9) < 0)
		return -1;
#elif defined(CRISV10) || defined(CRISV32)
	if (upeek(tcp->pid, 4*PT_R10, &cris_r10) < 0)
		return -1;
#elif defined(MICROBLAZE)
	/* BUGFIX: "&microblaze_r3" had been corrupted to "µblaze_r3"
	 * ("&micro" swallowed by an HTML-entity decode), which does not
	 * compile; the variable name matches its use in get_error(). */
	if (upeek(tcp->pid, 3 * 4, &microblaze_r3) < 0)
		return -1;
#elif defined(XTENSA)
	if (upeek(tcp->pid, REG_A_BASE + 2, &xtensa_a2) < 0)
		return -1;
#else
# error get_syscall_result is not implemented for this architecture
#endif
	return 1;
}
/* Decode the syscall result fetched by get_regs()/get_syscall_result()
 * and store it into tcp->u_rval / tcp->u_error. Never fails.
 * On a failed syscall, u_rval is set to -1 and u_error to the positive
 * errno; on success u_error stays 0. Two ABI conventions appear below:
 * most architectures return a negated errno in the result register
 * (checked via is_negated_errno()), while some (IA64, MIPS, POWERPC,
 * ALPHA, SPARC, SPARC64) signal failure through a separate flag
 * register or condition bit.
 * (Previous header comment here was stale: it described the int-valued
 * "Returns 1 / -1" contract of get_syscall_result(), but this function
 * is void.)
 */
static void
get_error(struct tcb *tcp)
{
	int u_error = 0;
	int check_errno = 1;

	/* Syscalls flagged SYSCALL_NEVER_FAILS always return a plain
	 * value, so skip the errno interpretation entirely. */
	if (tcp->s_ent->sys_flags & SYSCALL_NEVER_FAILS) {
		check_errno = 0;
	}
#if defined(S390) || defined(S390X)
	if (check_errno && is_negated_errno(s390_regset.gprs[2])) {
		tcp->u_rval = -1;
		u_error = -s390_regset.gprs[2];
	}
	else {
		tcp->u_rval = s390_regset.gprs[2];
	}
#elif defined(I386)
	if (check_errno && is_negated_errno(i386_regs.eax)) {
		tcp->u_rval = -1;
		u_error = -i386_regs.eax;
	}
	else {
		tcp->u_rval = i386_regs.eax;
	}
#elif defined(X86_64) || defined(X32)
	/*
	 * In X32, return value is 64-bit (llseek uses one).
	 * Using merely "long rax" would not work.
	 */
	kernel_long_t rax;
	if (x86_io.iov_len == sizeof(i386_regs)) {
		/* Sign extend from 32 bits */
		rax = (int32_t) i386_regs.eax;
	} else {
		rax = x86_64_regs.rax;
	}
	if (check_errno && is_negated_errno(rax)) {
		tcp->u_rval = -1;
		u_error = -rax;
	}
	else {
		tcp->u_rval = rax;
# ifdef X32
		/* tcp->u_rval contains a truncated value */
		tcp->u_lrval = rax;
# endif
	}
#elif defined(IA64)
	if (ia64_ia32mode) {
		/* ia32 compatibility mode uses the negated-errno style. */
		int err = ia64_regs.gr[8];
		if (check_errno && is_negated_errno(err)) {
			tcp->u_rval = -1;
			u_error = -err;
		}
		else {
			tcp->u_rval = err;
		}
	} else {
		/* native IA64: gr[10] non-zero flags an error in gr[8]. */
		if (check_errno && ia64_regs.gr[10]) {
			tcp->u_rval = -1;
			u_error = ia64_regs.gr[8];
		} else {
			tcp->u_rval = ia64_regs.gr[8];
		}
	}
#elif defined(MIPS)
	/* a3 non-zero flags an error; v0 holds errno or the value. */
	if (check_errno && mips_REG_A3) {
		tcp->u_rval = -1;
		u_error = mips_REG_V0;
	} else {
# if defined LINUX_MIPSN32
		tcp->u_lrval = mips_REG_V0;
# endif
		tcp->u_rval = mips_REG_V0;
	}
#elif defined(POWERPC)
	/* summary-overflow bit of CR0 flags a failed syscall. */
	if (check_errno && (ppc_regs.ccr & 0x10000000)) {
		tcp->u_rval = -1;
		u_error = ppc_regs.gpr[3];
	}
	else {
		tcp->u_rval = ppc_regs.gpr[3];
	}
#elif defined(M68K)
	if (check_errno && is_negated_errno(m68k_d0)) {
		tcp->u_rval = -1;
		u_error = -m68k_d0;
	}
	else {
		tcp->u_rval = m68k_d0;
	}
#elif defined(ARM) || defined(AARCH64)
# if defined(AARCH64)
	/* currpers == 1 selects the 64-bit personality register set. */
	if (tcp->currpers == 1) {
		if (check_errno && is_negated_errno(aarch64_regs.regs[0])) {
			tcp->u_rval = -1;
			u_error = -aarch64_regs.regs[0];
		}
		else {
			tcp->u_rval = aarch64_regs.regs[0];
		}
	}
	else
# endif
	{
		if (check_errno && is_negated_errno(arm_regs.ARM_r0)) {
			tcp->u_rval = -1;
			u_error = -arm_regs.ARM_r0;
		}
		else {
			tcp->u_rval = arm_regs.ARM_r0;
		}
	}
#elif defined(AVR32)
	if (check_errno && avr32_regs.r12 && (unsigned) -avr32_regs.r12 < nerrnos) {
		tcp->u_rval = -1;
		u_error = -avr32_regs.r12;
	}
	else {
		tcp->u_rval = avr32_regs.r12;
	}
#elif defined(BFIN)
	if (check_errno && is_negated_errno(bfin_r0)) {
		tcp->u_rval = -1;
		u_error = -bfin_r0;
	} else {
		tcp->u_rval = bfin_r0;
	}
#elif defined(ALPHA)
	/* a3 non-zero flags an error; r0 holds errno or the value. */
	if (check_errno && alpha_a3) {
		tcp->u_rval = -1;
		u_error = alpha_r0;
	}
	else {
		tcp->u_rval = alpha_r0;
	}
#elif defined(SPARC)
	/* carry bit of PSR flags a failed syscall. */
	if (check_errno && sparc_regs.psr & PSR_C) {
		tcp->u_rval = -1;
		u_error = sparc_regs.u_regs[U_REG_O0];
	}
	else {
		tcp->u_rval = sparc_regs.u_regs[U_REG_O0];
	}
#elif defined(SPARC64)
	/* carry bits (icc/xcc) in TSTATE flag a failed syscall. */
	if (check_errno && sparc_regs.tstate & 0x1100000000UL) {
		tcp->u_rval = -1;
		u_error = sparc_regs.u_regs[U_REG_O0];
	}
	else {
		tcp->u_rval = sparc_regs.u_regs[U_REG_O0];
	}
#elif defined(HPPA)
	if (check_errno && is_negated_errno(hppa_r28)) {
		tcp->u_rval = -1;
		u_error = -hppa_r28;
	}
	else {
		tcp->u_rval = hppa_r28;
	}
#elif defined(SH)
	if (check_errno && is_negated_errno(sh_r0)) {
		tcp->u_rval = -1;
		u_error = -sh_r0;
	}
	else {
		tcp->u_rval = sh_r0;
	}
#elif defined(SH64)
	if (check_errno && is_negated_errno(sh64_r9)) {
		tcp->u_rval = -1;
		u_error = -sh64_r9;
	}
	else {
		tcp->u_rval = sh64_r9;
	}
#elif defined(METAG)
	/* result pointer in D0Re0 (D0.0) */
	if (check_errno && is_negated_errno(metag_regs.dx[0][0])) {
		tcp->u_rval = -1;
		u_error = -metag_regs.dx[0][0];
	}
	else {
		tcp->u_rval = metag_regs.dx[0][0];
	}
#elif defined(CRISV10) || defined(CRISV32)
	if (check_errno && cris_r10 && (unsigned) -cris_r10 < nerrnos) {
		tcp->u_rval = -1;
		u_error = -cris_r10;
	}
	else {
		tcp->u_rval = cris_r10;
	}
#elif defined(TILE)
	/*
	 * The standard tile calling convention returns the value (or negative
	 * errno) in r0, and zero (or positive errno) in r1.
	 * Until at least kernel 3.8, however, the r1 value is not reflected
	 * in ptregs at this point, so we use r0 here.
	 */
	if (check_errno && is_negated_errno(tile_regs.regs[0])) {
		tcp->u_rval = -1;
		u_error = -tile_regs.regs[0];
	} else {
		tcp->u_rval = tile_regs.regs[0];
	}
#elif defined(MICROBLAZE)
	if (check_errno && is_negated_errno(microblaze_r3)) {
		tcp->u_rval = -1;
		u_error = -microblaze_r3;
	}
	else {
		tcp->u_rval = microblaze_r3;
	}
#elif defined(OR1K)
	if (check_errno && is_negated_errno(or1k_regs.gpr[11])) {
		tcp->u_rval = -1;
		u_error = -or1k_regs.gpr[11];
	}
	else {
		tcp->u_rval = or1k_regs.gpr[11];
	}
#elif defined(XTENSA)
	if (check_errno && is_negated_errno(xtensa_a2)) {
		tcp->u_rval = -1;
		u_error = -xtensa_a2;
	}
	else {
		tcp->u_rval = xtensa_a2;
	}
#elif defined(ARC)
	if (check_errno && is_negated_errno(arc_regs.scratch.r0)) {
		tcp->u_rval = -1;
		u_error = -arc_regs.scratch.r0;
	}
	else {
		tcp->u_rval = arc_regs.scratch.r0;
	}
#endif
	tcp->u_error = u_error;
}
/* If -e read=FD / -e write=FD was requested for the fd in u_arg[0],
 * hex/ascii-dump the data transferred by this (successful) I/O syscall.
 * Called on syscall exit; does nothing for failed syscalls, out-of-range
 * fds, or undecoded (printargs) syscalls.
 */
static void
dumpio(struct tcb *tcp)
{
	const unsigned long fd = (unsigned long) tcp->u_arg[0];
	int (*func)();

	if (syserror(tcp) || fd >= num_quals)
		return;

	func = tcp->s_ent->sys_func;
	if (func == printargs)
		return;

	if (qual_flags[fd] & QUAL_READ) {
		/* Input syscalls: dump what was actually read (u_rval bytes). */
		if (func == sys_read || func == sys_pread ||
		    func == sys_recv || func == sys_recvfrom) {
			dumpstr(tcp, tcp->u_arg[1], tcp->u_rval);
			return;
		}
		if (func == sys_readv) {
			dumpiov(tcp, tcp->u_arg[2], tcp->u_arg[1]);
			return;
		}
#if HAVE_SENDMSG
		if (func == sys_recvmsg) {
			dumpiov_in_msghdr(tcp, tcp->u_arg[1]);
			return;
		}
		if (func == sys_recvmmsg) {
			dumpiov_in_mmsghdr(tcp, tcp->u_arg[1]);
			return;
		}
#endif
	}
	if (qual_flags[fd] & QUAL_WRITE) {
		/* Output syscalls: dump the buffer that was submitted. */
		if (func == sys_write || func == sys_pwrite ||
		    func == sys_send || func == sys_sendto)
			dumpstr(tcp, tcp->u_arg[1], tcp->u_arg[2]);
		else if (func == sys_writev)
			dumpiov(tcp, tcp->u_arg[2], tcp->u_arg[1]);
#if HAVE_SENDMSG
		else if (func == sys_sendmsg)
			dumpiov_in_msghdr(tcp, tcp->u_arg[1]);
		else if (func == sys_sendmmsg)
			dumpiov_in_mmsghdr(tcp, tcp->u_arg[1]);
#endif
	}
}
/* Handle the "exiting" stop of a traced syscall: decode the return
 * value (or print an error indicator), run the syscall's exit decoder,
 * dump I/O buffers if requested and account timing/statistics.
 * Returns 0 normally, or the failure indicator from the register-fetch
 * path when prior ptrace operations failed.
 * NOTE(review): with jflag set, "Return"/"TimeSpent" are printed as
 * JSON-style key/value lines instead of the classic "= value" form.
 */
static int
trace_syscall_exiting(struct tcb *tcp)
{
	int sys_res;
	struct timeval tv;
	int res;
	long u_error;

	/* Measure the exit time as early as possible to avoid errors. */
	if (Tflag || cflag)
		gettimeofday(&tv, NULL);

#ifdef USE_LIBUNWIND
	if (stack_trace_enabled) {
		if (tcp->s_ent->sys_flags & STACKTRACE_INVALIDATE_CACHE)
			unwind_cache_invalidate(tcp);
	}
#endif

#if SUPPORTED_PERSONALITIES > 1
	update_personality(tcp, tcp->currpers);
#endif
	res = (get_regs_error ? -1 : get_syscall_result(tcp));
	if (res == 1) {
		get_error(tcp); /* never fails */
		/* Filtered / pre-exec-hidden syscalls only need the
		 * INSYSCALL flag cleared, no output. */
		if (filtered(tcp) || hide_log_until_execve)
			goto ret;
	}

	if (cflag) {
		count_syscall(tcp, &tv);
		if (cflag == CFLAG_ONLY_STATS) {
			goto ret;
		}
	}

	/* If not in -ff mode, and printing_tcp != tcp,
	 * then the log currently does not end with output
	 * of _our syscall entry_, but with something else.
	 * We need to say which syscall's return is this.
	 *
	 * Forced reprinting via TCB_REPRINT is used only by
	 * "strace -ff -oLOG test/threaded_execve" corner case.
	 * It's the only case when -ff mode needs reprinting.
	 */
	if ((followfork < 2 && printing_tcp != tcp) || (tcp->flags & TCB_REPRINT)) {
		tcp->flags &= ~TCB_REPRINT;
		printleader(tcp);
		if (tcp->qual_flg & UNDEFINED_SCNO)
			tprintf("<... %s resumed> ", undefined_scno_name(tcp));
		else
			tprintf("<... %s resumed> ", tcp->s_ent->sys_name);
	}
	printing_tcp = tcp;

	if (res != 1) {
		/* There was error in one of prior ptrace ops */
		tprints(") ");
		tabto();
		tprints("= ? <unavailable>\n");
		line_ended();
		tcp->flags &= ~TCB_INSYSCALL;
		return res;
	}

	sys_res = 0;
	if (tcp->qual_flg & QUAL_RAW) {
		/* sys_res = printargs(tcp); - but it's nop on sysexit */
	} else {
		/* FIXME: not_failing_only (IOW, option -z) is broken:
		 * failure of syscall is known only after syscall return.
		 * Thus we end up with something like this on, say, ENOENT:
		 *     open("doesnt_exist", O_RDONLY <unfinished ...>
		 *     {next syscall decode}
		 * whereas the intended result is that open(...) line
		 * is not shown at all.
		 */
		if (not_failing_only && tcp->u_error)
			goto ret; /* ignore failed syscalls */
		sys_res = tcp->s_ent->sys_func(tcp);
	}

	/* Close the argument list opened on syscall entry. */
	if(jflag==0)
		tprints(") ");
	else
		tprints(")\n");
	tabto();
	u_error = tcp->u_error;
	if (tcp->qual_flg & QUAL_RAW) {
		/* Raw mode: print only the numeric result. */
		if (u_error)
		{
			if(jflag==1)
			{
				tprintf(" \"Return\" : -1 (errno %ld)", u_error);
			}
			else
				tprintf("= -1 (errno %ld)", u_error);
		}
		else
		{
			if(jflag==1)
			{
				tprintf(" \"Return\" : %#lx", tcp->u_rval);
			}
			else
				tprintf("= %#lx", tcp->u_rval);
		}
	}
	else if (!(sys_res & RVAL_NONE) && u_error) {
		/* Failed syscall: special-case the kernel-internal restart
		 * codes, otherwise print the symbolic errno. */
		switch (u_error) {
		/* Blocked signals do not interrupt any syscalls.
		 * In this case syscalls don't return ERESTARTfoo codes.
		 *
		 * Deadly signals set to SIG_DFL interrupt syscalls
		 * and kill the process regardless of which of the codes below
		 * is returned by the interrupted syscall.
		 * In some cases, kernel forces a kernel-generated deadly
		 * signal to be unblocked and set to SIG_DFL (and thus cause
		 * death) if it is blocked or SIG_IGNed: for example, SIGSEGV
		 * or SIGILL. (The alternative is to leave process spinning
		 * forever on the faulty instruction - not useful).
		 *
		 * SIG_IGNed signals and non-deadly signals set to SIG_DFL
		 * (for example, SIGCHLD, SIGWINCH) interrupt syscalls,
		 * but kernel will always restart them.
		 */
		case ERESTARTSYS:
			/* Most common type of signal-interrupted syscall exit code.
			 * The system call will be restarted with the same arguments
			 * if SA_RESTART is set; otherwise, it will fail with EINTR.
			 */
			tprints("= ? ERESTARTSYS (To be restarted if SA_RESTART is set)");
			break;
		case ERESTARTNOINTR:
			/* Rare. For example, fork() returns this if interrupted.
			 * SA_RESTART is ignored (assumed set): the restart is unconditional.
			 */
			tprints("= ? ERESTARTNOINTR (To be restarted)");
			break;
		case ERESTARTNOHAND:
			/* pause(), rt_sigsuspend() etc use this code.
			 * SA_RESTART is ignored (assumed not set):
			 * syscall won't restart (will return EINTR instead)
			 * even after signal with SA_RESTART set. However,
			 * after SIG_IGN or SIG_DFL signal it will restart
			 * (thus the name "restart only if has no handler").
			 */
			tprints("= ? ERESTARTNOHAND (To be restarted if no handler)");
			break;
		case ERESTART_RESTARTBLOCK:
			/* Syscalls like nanosleep(), poll() which can't be
			 * restarted with their original arguments use this
			 * code. Kernel will execute restart_syscall() instead,
			 * which changes arguments before restarting syscall.
			 * SA_RESTART is ignored (assumed not set) similarly
			 * to ERESTARTNOHAND. (Kernel can't honor SA_RESTART
			 * since restart data is saved in "restart block"
			 * in task struct, and if signal handler uses a syscall
			 * which in turn saves another such restart block,
			 * old data is lost and restart becomes impossible)
			 */
			tprints("= ? ERESTART_RESTARTBLOCK (Interrupted by signal)");
			break;
		default:
			if ((unsigned long) u_error < nerrnos && errnoent[u_error])
			{
				if(jflag==1)
				{
					tprintf(" \"Return\" : -1 %s (%s)", errnoent[u_error],strerror(u_error));
				}
				else
					tprintf("= -1 %s (%s)", errnoent[u_error],strerror(u_error));
			}
			else
			{
				if(jflag==1)
				{
					tprintf(" \"Return\" : -1 ERRNO_%lu (%s)", u_error, strerror(u_error));
				}
				else
					tprintf("= -1 ERRNO_%lu (%s)", u_error, strerror(u_error));
			}
			break;
		}
		if ((sys_res & RVAL_STR) && tcp->auxstr)
		{
			tprintf(" (%s)", tcp->auxstr);
		}
	}
	else {
		/* Successful syscall: format per the decoder's RVAL_* hint. */
		if (sys_res & RVAL_NONE)
			tprints("= ?");
		else {
			switch (sys_res & RVAL_MASK) {
			case RVAL_HEX:
#if SUPPORTED_PERSONALITIES > 1
				/* 32-bit personality: print only 32 bits. */
				if (current_wordsize < sizeof(long))
				{
					if(jflag==1)
					{
						tprintf(" \"Return\" : %#x",(unsigned int) tcp->u_rval);
					}
					else
						tprintf("= %#x",(unsigned int) tcp->u_rval);
				}
				else
#endif
				if(jflag==1)
				{
					tprintf(" \"Return\" : %#lx",tcp->u_rval);
				}
				else
					tprintf("= %#lx",tcp->u_rval);
				break;
			case RVAL_OCTAL:
				if(jflag==1)
				{
					tprintf(" \"Return\" : %#lo", tcp->u_rval);
				}
				else
					tprintf("= %#lo", tcp->u_rval);
				break;
			case RVAL_UDECIMAL:
				if(jflag==1)
				{
					tprintf(" \"Return\" : %lu", tcp->u_rval);
				}
				else
					tprintf("= %lu", tcp->u_rval);
				break;
			case RVAL_DECIMAL:
				if(jflag==1)
				{
					tprintf(" \"Return\" : %ld", tcp->u_rval);
				}
				else
					tprintf("= %ld", tcp->u_rval);
				break;
			case RVAL_FD:
				if (show_fd_path) {
					tprints("= ");
					printfd(tcp, tcp->u_rval);
				}
				else
				{
					if(jflag==1)
					{
						tprintf(" \"Return\" : %ld", tcp->u_rval);
					}
					else
						tprintf("= %ld", tcp->u_rval);
				}
				break;
#if defined(LINUX_MIPSN32) || defined(X32)
			/*
			case RVAL_LHEX:
				tprintf("= %#llx", tcp->u_lrval);
				break;
			case RVAL_LOCTAL:
				tprintf("= %#llo", tcp->u_lrval);
				break;
			*/
			case RVAL_LUDECIMAL:
				tprintf("= %llu", tcp->u_lrval);
				break;
			/*
			case RVAL_LDECIMAL:
				tprintf("= %lld", tcp->u_lrval);
				break;
			*/
#endif
			default:
				fprintf(stderr,
					"invalid rval format\n");
				break;
			}
		}
		if ((sys_res & RVAL_STR) && tcp->auxstr)
			tprintf(" (%s)", tcp->auxstr);
	}
	if (Tflag) {
		/* Print elapsed time between syscall entry and exit. */
		tv_sub(&tv, &tv, &tcp->etime);
		if(jflag==1)
		{
			tprintf("\n \"TimeSpent\" : %ld.%06ld",(long) tv.tv_sec, (long) tv.tv_usec);
		}
		else
			tprintf(" <%ld.%06ld>",(long) tv.tv_sec, (long) tv.tv_usec);
	}
	tprints("\n");
	dumpio(tcp);
	line_ended();

#ifdef USE_LIBUNWIND
	if (stack_trace_enabled)
		unwind_print_stacktrace(tcp);
#endif
 ret:
	tcp->flags &= ~TCB_INSYSCALL;
	return 0;
}
/* Dispatch a syscall ptrace stop to the entering or exiting handler,
 * depending on whether TCP is currently inside a syscall. */
int
trace_syscall(struct tcb *tcp)
{
	if (exiting(tcp))
		return trace_syscall_exiting(tcp);
	return trace_syscall_entering(tcp);
}
|
rmcdouga/FitG | Java/FitG-BaseGame/src/com/rogers/rmcdouga/fitg/basegame/map/CapitalType.java | <reponame>rmcdouga/FitG<filename>Java/FitG-BaseGame/src/com/rogers/rmcdouga/fitg/basegame/map/CapitalType.java
package com.rogers.rmcdouga.fitg.basegame.map;
/**
 * Kind of capital present at a map location:
 * {@code None} for locations without a capital, or one of the two
 * capital kinds ({@code Provincial}, {@code Throne}) used on the
 * base-game map.
 */
public enum CapitalType {
	None, Provincial, Throne;
}
|
# CRUD actions for TaxonRange records. All actions require curator
# privileges (see curator_required filter).
class TaxonRangesController < ApplicationController
  before_filter :curator_required

  # GET /taxon_ranges/new — form for a new range, pre-bound to a taxon.
  def new
    @taxon_range = TaxonRange.new(:taxon_id => params[:taxon_id].to_i)
  end

  # GET /taxon_ranges/:id/edit
  def edit
    @taxon_range = TaxonRange.find(params[:id])
  end

  # POST /taxon_ranges
  def create
    @taxon_range = TaxonRange.new(params[:taxon_range])
    respond_to do |format|
      if @taxon_range.save
        # Fall back to the taxa index if the range has no taxon.
        format.html { redirect_to(@taxon_range.taxon || taxa_path, :notice => 'TaxonRange was successfully created.') }
      else
        format.html { render :action => "new" }
      end
    end
  end

  # PUT /taxon_ranges/:id
  def update
    @taxon_range = TaxonRange.find(params[:id])
    respond_to do |format|
      if @taxon_range.update_attributes(params[:taxon_range])
        # Removed a bare `@taxon_range.taxon` statement here: its return
        # value was discarded, so it was dead code with no effect.
        format.html { redirect_to(@taxon_range.taxon || taxa_path, :notice => 'TaxonRange was successfully updated.') }
      else
        format.html { render :action => "edit" }
      end
    end
  end

  # DELETE /taxon_ranges/:id
  def destroy
    @taxon_range = TaxonRange.find(params[:id])
    @taxon_range.destroy
    respond_to do |format|
      format.html { redirect_to(@taxon_range.taxon) }
    end
  end
end
|
rsarendus/eyebased-raytracer | eyebased-raytracer-demo/src/main/java/ee/ristoseene/raytracer/eyebased/demo/impl/OrientedVectorBuilder.java | package ee.ristoseene.raytracer.eyebased.demo.impl;
import ee.ristoseene.raytracer.eyebased.projection.Orientation;
import ee.ristoseene.vecmath.Vector3;
import java.util.Objects;
/**
 * Mutable builder that composes a vector from forward/right/up
 * multipliers applied to a fixed {@link Orientation}.
 *
 * Instances are obtained through {@link #positionBuilder(Orientation)}
 * (all multipliers start at 0) or {@link #scaleBuilder(Orientation)}
 * (all multipliers start at 1); the no-argument variants use the
 * demo-wide {@code Constants.ORIENTATION}.
 */
public class OrientedVectorBuilder {

    private final Orientation orientation;
    private double forwardMultiplier;
    private double rightMultiplier;
    private double upMultiplier;

    // Constructed only via the static factory methods below.
    private OrientedVectorBuilder(final Orientation orientation) {
        this.orientation = orientation;
    }

    /** @return the orientation this builder was created with */
    public Orientation getOrientation() {
        return orientation;
    }

    /** @return the current multiplier along the forward axis */
    public double getForwardMultiplier() {
        return forwardMultiplier;
    }

    /** Sets the multiplier along the forward axis. */
    public void setForwardMultiplier(final double multiplier) {
        this.forwardMultiplier = multiplier;
    }

    /** Fluent variant of {@link #setForwardMultiplier(double)}. */
    public OrientedVectorBuilder withForwardMultiplier(final double multiplier) {
        this.forwardMultiplier = multiplier;
        return this;
    }

    /** @return the current multiplier along the right axis */
    public double getRightMultiplier() {
        return rightMultiplier;
    }

    /** Sets the multiplier along the right axis. */
    public void setRightMultiplier(final double multiplier) {
        this.rightMultiplier = multiplier;
    }

    /** Fluent variant of {@link #setRightMultiplier(double)}. */
    public OrientedVectorBuilder withRightMultiplier(final double multiplier) {
        this.rightMultiplier = multiplier;
        return this;
    }

    /** @return the current multiplier along the up axis */
    public double getUpMultiplier() {
        return upMultiplier;
    }

    /** Sets the multiplier along the up axis. */
    public void setUpMultiplier(final double multiplier) {
        this.upMultiplier = multiplier;
    }

    /** Fluent variant of {@link #setUpMultiplier(double)}. */
    public OrientedVectorBuilder withUpMultiplier(final double multiplier) {
        this.upMultiplier = multiplier;
        return this;
    }

    /**
     * Produces the vector by applying the three multipliers to the
     * orientation's axes via {@code Orientation.multiply}.
     */
    public <R extends Vector3.Accessible> R build(final Vector3.Factory<R> resultFactory) {
        return orientation.multiply(forwardMultiplier, rightMultiplier, upMultiplier, resultFactory);
    }

    /** Builder suitable for positions: all multipliers start at zero. */
    public static OrientedVectorBuilder positionBuilder(final Orientation orientation) {
        final OrientedVectorBuilder builder = new OrientedVectorBuilder(Objects.requireNonNull(orientation));
        builder.setForwardMultiplier(0.0);
        builder.setRightMultiplier(0.0);
        builder.setUpMultiplier(0.0);
        return builder;
    }

    /** Builder suitable for scales: all multipliers start at one. */
    public static OrientedVectorBuilder scaleBuilder(final Orientation orientation) {
        final OrientedVectorBuilder builder = new OrientedVectorBuilder(Objects.requireNonNull(orientation));
        builder.setForwardMultiplier(1.0);
        builder.setRightMultiplier(1.0);
        builder.setUpMultiplier(1.0);
        return builder;
    }

    /** {@link #positionBuilder(Orientation)} with the default orientation. */
    public static OrientedVectorBuilder positionBuilder() {
        return positionBuilder(Constants.ORIENTATION);
    }

    /** {@link #scaleBuilder(Orientation)} with the default orientation. */
    public static OrientedVectorBuilder scaleBuilder() {
        return scaleBuilder(Constants.ORIENTATION);
    }
}
|
UprootStaging/maven-OpenViewerFX-src | OpenViewerFX/src/main/java/org/jpedal/parser/text/HTMLTextUtils.java | /*
* ===========================================
* Java Pdf Extraction Decoding Access Library
* ===========================================
*
* Project Info: http://www.idrsolutions.com
* Help section for developers at http://www.idrsolutions.com/support/
*
* (C) Copyright 1997-2015 IDRsolutions and Contributors.
*
* This file is part of JPedal/JPDF2HTML5
*
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* ---------------
* HTMLTextUtils.java
* ---------------
*/
package org.jpedal.parser.text;
import org.jpedal.fonts.PdfFont;
import org.jpedal.fonts.StandardFonts;
/**
*
* @author markee
*/
class HTMLTextUtils {

    /**
     * Re-maps a glyph's raw code through the font's character mapping.
     *
     * If the font maps the raw code to a named glyph with a Differences
     * entry, the glyph data is updated to that code. Otherwise, when the
     * current display value is a control character (&lt; 32), the Adobe
     * map for the glyph name is consulted as a fallback — this repairs
     * PDFs whose unicode table is wrong even though the glyph name is
     * correct (e.g. klar--men-aldri-ferdig_dacecc.pdf).
     *
     * @return true only when the Adobe-map fallback was applied
     */
    static boolean remapGlyph(final PdfFont currentFontData, final GlyphData glyphData) {

        final String charGlyph = currentFontData.getMappedChar(glyphData.getRawInt(), false);
        if (charGlyph == null) {
            return false;
        }

        final int diffChar = currentFontData.getDiffChar(charGlyph);
        if (diffChar != -1) {
            // only reassign when valid (-1 would corrupt later decoding)
            glyphData.setRawInt(diffChar);
            glyphData.setDisplayValue(String.valueOf((char) diffChar));
            return false;
        }

        final String displayValue = glyphData.getDisplayValue();
        if (!displayValue.isEmpty() && displayValue.charAt(0) < 32) {
            final int adobeValue = StandardFonts.getAdobeMap(charGlyph);
            // getAdobeMap can return -1 for invalid values
            // (e.g. general-May2014/18147.pdf) which must be rejected
            if (adobeValue > -1) {
                glyphData.setRawInt(adobeValue);
                glyphData.set(String.valueOf((char) adobeValue));
                return true;
            }
        }
        return false;
    }
}
|
iotile/iotile_cloud | server/apps/configattribute/tests/test_config_attribute_api.py | <filename>server/apps/configattribute/tests/test_config_attribute_api.py
import json
from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
from apps.datablock.models import DataBlock
from apps.physicaldevice.models import Device
from apps.project.models import Project
from apps.streamnote.models import StreamNote
from apps.utils.test_util import TestMixin
from ..models import *
user_model = get_user_model()
class ConfigAttributeAPITests(TestMixin, APITestCase):
    def setUp(self):
        """Build the shared fixture via TestMixin helpers: users, orgs,
        device templates, two devices in different projects, and two
        data blocks attached to d1."""
        self.usersTestSetup()
        self.orgTestSetup()
        self.deviceTemplateTestSetup()
        # d1 lives in project p1 (created by u2); d2 in p2 (created by u3)
        self.d1 = Device.objects.create_device(project=self.p1, label='d1', template=self.dt1, created_by=self.u2)
        self.d2 = Device.objects.create_device(project=self.p2, label='d2', template=self.dt1, created_by=self.u3)
        # two DataBlocks for d1, both in p1's org
        self.b1 = DataBlock.objects.create(org=self.p1.org, title='Block1', device=self.d1, block=1, created_by=self.u2)
        self.b2 = DataBlock.objects.create(org=self.p1.org, title='Block2', device=self.d1, block=2, created_by=self.u2)
    def tearDown(self):
        """Delete fixture rows explicitly, then run the mixin teardown
        helpers (templates, orgs, users) in reverse setup order."""
        Device.objects.all().delete()
        DataBlock.objects.all().delete()
        ConfigAttribute.objects.all().delete()
        ConfigAttributeName.objects.all().delete()
        self.deviceTemplateTestTearDown()
        self.orgTestTearDown()
        self.userTestTearDown()
    def testPostConfigAttributeName(self):
        """
        Only Staff can create new configuration attribute names.

        Checks: anonymous -> 401; staff user -> 201 (names must start
        with ':' and be unique); non-staff user -> 403.
        (Login credentials are redacted placeholders in this fixture;
        which user is staff is set up by the TestMixin helpers.)
        """
        url = reverse('configattributename-list')
        data = {
            'name': ':foo'
        }
        self.assertEqual(ConfigAttributeName.objects.count(), 0)

        # Unauthenticated requests are rejected
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        # Staff user can create a name
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(ConfigAttributeName.objects.count(), 1)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['name'], ':foo')

        # Duplicate not allowed
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        # Names must start with ':'
        data = {
            'name': 'bad_name'
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        # Optional description and tags are echoed back on create
        data = {
            'name': ':bar',
            'description': 'This is a description',
            'tags': ['tag1', 'tag2']
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(ConfigAttributeName.objects.count(), 2)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['name'], data['name'])
        self.assertEqual(deserialized['description'], data['description'])
        self.assertEqual(deserialized['tags'][0], data['tags'][0])
        self.assertEqual(deserialized['tags'][1], data['tags'][1])
        self.client.logout()

        # Non-staff user is forbidden
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.client.logout()
    def testGetConfigAttributeName(self):
        """
        Everybody (any authenticated user) can list config attribute
        names; anonymous requests get 401. Also exercises the
        ``name_q`` (substring) and ``tag`` list filters.
        """
        ConfigAttributeName.objects.create(name=':foo', tags=['tag1'], created_by=self.u1)
        ConfigAttributeName.objects.create(name=':bar', created_by=self.u1)
        ConfigAttributeName.objects.create(name=':foo:bar', tags=['tag1', 'tag2'], created_by=self.u1)
        self.assertEqual(ConfigAttributeName.objects.count(), 3)

        url = reverse('configattributename-list')
        # Anonymous -> 401
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        # First user sees all names
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 3)
        self.client.logout()

        # Second user sees all names too, and can filter
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 3)

        # name_q matches substrings: ':foo' and ':foo:bar'
        response = self.client.get(url + '?name_q=foo', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 2)

        # tag filter matches the name's tags array
        response = self.client.get(url + '?tag=tag1', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 2)
        response = self.client.get(url + '?tag=tag2', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)
        self.client.logout()
    def testGetConfigAttribute(self):
        """
        Everybody can list config attributes, but only for targets they
        can access. The list endpoint requires a ``target`` query param
        (400 without it) and supports the ``name_q`` filter.
        """
        # Two attributes on org o2, one on org o3, one on user u3
        foo_attr1 = ConfigAttribute.objects.get_or_create_attribute(
            target=self.o2,
            name=':foo',
            data={'a': 'b'},
            updated_by=self.u1
        )
        foo_attr2 = ConfigAttribute.objects.get_or_create_attribute(
            target=self.o2,
            name=':bar',
            data={'a': 'b'},
            updated_by=self.u1
        )
        # foo_attr3/foo_attr4 are only asserted via counts below
        foo_attr3 = ConfigAttribute.objects.get_or_create_attribute(
            target=self.o3,
            name=':bar',
            data={'a': 'c'},
            updated_by=self.u1
        )
        foo_attr4 = ConfigAttribute.objects.get_or_create_attribute(
            target=self.u3,
            name=':user',
            data={'a': 'c'},
            updated_by=self.u1
        )
        self.assertEqual(ConfigAttribute.objects.count(), 4)

        url = reverse('configattribute-list')
        # Anonymous -> 401
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        # ?target= is mandatory
        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        response = self.client.get(url + '?target={}'.format(self.o2.obj_target_slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 2)
        self.client.logout()

        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        response = self.client.get(url + '?target={}'.format(self.o2.obj_target_slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 2)
        # This user has no access to o3's attributes -> empty list
        response = self.client.get(url + '?target={}'.format(self.o3.obj_target_slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 0)

        # name_q narrows results to a single attribute
        filter_url = url + '?target={}&name_q=foo'.format(self.o2.obj_target_slug)
        response = self.client.get(filter_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)
        self.assertEqual(deserialized['results'][0]['id'], foo_attr1.id)
        filter_url = url + '?target={}&name_q=bar'.format(self.o2.obj_target_slug)
        response = self.client.get(filter_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)
        self.assertEqual(deserialized['results'][0]['id'], foo_attr2.id)
        self.client.logout()

        # A user with access to o3 sees its attribute, and can read
        # attributes attached to their own user object
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        self.assertTrue(self.o3.has_access(self.u3))
        response = self.client.get(url + '?target={}'.format(self.o3.obj_target_slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)
        response = self.client.get(url + '?target={}'.format(self.u3.obj_target_slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)
        self.client.logout()
def testSearchConfigAttribute(self):
"""
test search path
"""
project = Project.objects.create(name='Project 2', org=self.o2, created_by=self.u2)
foo_name = ConfigAttributeName.objects.create(name=':foo', created_by=self.u1)
bar_name = ConfigAttributeName.objects.create(name=':bar', created_by=self.u1)
foo_attr1 = ConfigAttribute.objects.get_or_create_attribute(
target=self.o2,
name=foo_name,
data={'a': 'b'},
updated_by=self.u1
)
foo_attr2 = ConfigAttribute.objects.get_or_create_attribute(
target=self.u1,
name=foo_name,
data={'a': 'b'},
updated_by=self.u1
)
bar_attr = ConfigAttribute.objects.get_or_create_attribute(
target=self.u2,
name=bar_name,
data={'c': 'd'},
updated_by=self.u2
)
self.assertEqual(ConfigAttribute.objects.count(), 3)
url = reverse('configattribute-search')
ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
self.assertTrue(ok)
self.assertTrue(project.org.id, self.o2.id)
search_url = url + '?target={}&name=:foo'.format(project.obj_target_slug)
response = self.client.get(search_url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
deserialized = json.loads(response.content.decode())
self.assertEqual(deserialized['id'], foo_attr1.id)
search_url = url + '?target={}&name=:bar'.format(project.obj_target_slug)
response = self.client.get(search_url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
deserialized = json.loads(response.content.decode())
self.assertEqual(deserialized['id'], bar_attr.id)
search_url = url + '?target={}&name=:foobar'.format(project.obj_target_slug)
response = self.client.get(search_url, format='json')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.client.logout()
self.assertEqual(ConfigAttribute.objects.count(), 3)
url = reverse('configattribute-search')
ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
self.assertTrue(ok)
self.assertTrue(project.org.id, self.o2.id)
search_url = url + '?target={}&name=:foo'.format(project.obj_target_slug)
response = self.client.get(search_url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.client.logout()
    def testPostConfigAttribute(self):
        """
        Test that users can create configuration attributes.

        Covers: 401 when anonymous, 400 when the attribute name does not
        exist yet, 201 on create, POST-as-upsert for an existing attribute,
        and 403 for a user without access to the target project.
        """
        project = Project.objects.create(name='Project 2', org=self.o2, created_by=self.u2)
        url = reverse('configattribute-list')
        data = {
            'name': ':foo',
            'target': project.obj_target_slug,
            'data': {
                'a': 'b'
            }
        }
        self.assertEqual(ConfigAttribute.objects.count(), 0)
        # Anonymous POST -> 401
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        # Configuration Attribute Name does not exist yet -> 400
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        ConfigAttributeName.objects.create(name=':foo', tags=['tag1'], created_by=self.u1)
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['name'], ':foo')
        self.assertEqual(deserialized['data']['a'], 'b')
        # Duplicate will be equivalent to PATCH: same row updated in place,
        # count stays at 1 but the stored data changes.
        data['data']['a'] = 'c'
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['name'], ':foo')
        self.assertEqual(deserialized['data']['a'], 'c')
        self.client.logout()
        # This login is expected to lack access to the project (403 below).
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        ConfigAttributeName.objects.create(name=':bar', tags=['tag1'], created_by=self.u1)
        data = {
            'name': ':bar',
            'target': project.obj_target_slug,
            'data': {
                'a': 'b'
            }
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.client.logout()
def testPostBadConfigAttribute(self):
"""
Test that users gets proper errors if illegal or bad payloads
"""
project = Project.objects.create(name='Project 2', org=self.o2, created_by=self.u2)
url = reverse('configattribute-list')
ConfigAttributeName.objects.create(name=':foo', tags=['tag1'], created_by=self.u1)
data = {
'name': ':foo',
'target': self.d1.obj_target_slug.upper(), # see if we can hanlde upper case
'data': {
'a': 'b'
}
}
self.assertEqual(ConfigAttribute.objects.count(), 0)
ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
self.assertTrue(ok)
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
data = {
'name': ':foo',
'target': self.d2.formatted_gid,
'data': {
'a': 'b'
}
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
d1000 = Device.objects.create_device(id=0x00ff, project=self.d2.project, label='d1', template=self.dt1, created_by=self.u2)
data = {
'name': ':foo',
'target': 'd--0000-0000-0000-00FF',
'data': {
'a': 'b'
}
}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.client.logout()
    def testPatchConfigAttribute(self):
        """
        Test that users can't update configuration attributes using PATCH.

        The API deliberately rejects PUT/PATCH (updates are done by POSTing
        again with the same target). Verifies the stored data is untouched
        after a rejected PATCH.
        """
        project = Project.objects.create(name='Project 2', org=self.o2, created_by=self.u2)
        data = {
            'a': 'b'
        }
        name = ConfigAttributeName.objects.create(name=':foo', tags=['tag1'], created_by=self.u1)
        ca = ConfigAttribute.objects.create(name=name, target=project.obj_target_slug, data=data, updated_by=self.u1)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        self.assertEqual(ca.data, {'a': 'b'})
        url = reverse('configattribute-detail', kwargs={'pk': ca.id})
        payload = {
            'data': {
                'c': 'd'
            }
        }
        # Anonymous PATCH -> 401
        response = self.client.patch(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        # Authenticated PATCH -> 400 with an explanatory error payload
        response = self.client.patch(url, payload, format='json')
        deserialized = json.loads(response.content.decode())
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertTrue('error' in deserialized)
        self.assertEqual(deserialized['error'], 'PUT and PATCH methods are not supported; use POST with the right target')
        # Re-fetch to confirm nothing was persisted by the rejected PATCH.
        ca = ConfigAttribute.objects.get(pk=ca.id)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        self.assertEqual(ca.data, {'a': 'b'})
        self.client.logout()
    def testPostConfigAttributeWithLog(self):
        """
        Test that users can create configuration attributes and make a log.

        With 'log_as_note' set, every POST (initial create and subsequent
        upsert alike) also adds a StreamNote attached to the target device.
        """
        Project.objects.create(name='Project 2', org=self.o2, created_by=self.u2)
        url = reverse('configattribute-list')
        data = {
            'name': ':foo',
            'target': self.d1.obj_target_slug,
            'log_as_note': True,
            'data': {
                'a': 'b',
                'c': 5
            }
        }
        self.assertEqual(ConfigAttribute.objects.count(), 0)
        self.assertEqual(StreamNote.objects.count(), 0)
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        ConfigAttributeName.objects.create(name=':foo', tags=['tag1'], created_by=self.u1)
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        self.assertEqual(StreamNote.objects.count(), 1)
        # Second POST upserts the same attribute (count stays 1) but still
        # logs a new note (count goes to 2).
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        self.assertEqual(StreamNote.objects.count(), 2)
        # All notes are attached to the device that was targeted.
        for item in StreamNote.objects.all():
            self.assertEqual(item.target_slug, self.d1.obj_target_slug)
        self.client.logout()
    def testPostConfigAttributeBadPayload(self):
        """
        Test that a POST missing required fields is rejected.

        The payload below has 'name' and 'target' but places 'a': 'b' at
        the top level instead of inside 'data'; the API must answer 400 and
        list the required fields.
        """
        project = Project.objects.create(name='Project 2', org=self.o2, created_by=self.u2)
        foo_name = ConfigAttributeName.objects.create(name=':foo', created_by=self.u1)
        foo_attr1 = ConfigAttribute.objects.get_or_create_attribute(
            target=self.o2,
            name=foo_name,
            data={'a': 'b'},
            updated_by=self.u1
        )
        url = reverse('configattribute-list')
        # Built but not exercised below.
        url_detail = reverse('configattribute-detail', kwargs={'pk': foo_attr1.id})
        data = {
            'name': ':foo',
            'target': project.obj_target_slug,
            'a': 'b'  # should be nested under a 'data' key
        }
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        # Missing 'data' field -> 400 listing all required fields
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.content, b'["Required fields: data, target, name"]')
        self.client.logout()
    def testDeleteConfigAttribute(self):
        """
        Test that users can delete configuration attributes.

        Covers: 401 when anonymous, 204 on successful delete, 404 when the
        attribute is already gone, and 403 (nothing deleted) for a user
        without access to the target project.
        """
        p = Project.objects.create(name='Project 2', org=self.o2, created_by=self.u2)
        foo_name = ConfigAttributeName.objects.create(name=':foo', created_by=self.u1)
        bar_name = ConfigAttributeName.objects.create(name=':bar', created_by=self.u1)
        foobar_name = ConfigAttributeName.objects.create(name=':foo:bar', created_by=self.u1)
        attr1 = ConfigAttribute.objects.get_or_create_attribute(
            target=p,
            name=foo_name,
            data={'a': 'b'},
            updated_by=self.u1
        )
        attr2 = ConfigAttribute.objects.get_or_create_attribute(
            target=p,
            name=bar_name,
            data={'a': 'b'},
            updated_by=self.u1
        )
        attr3 = ConfigAttribute.objects.get_or_create_attribute(
            target=p,
            name=foobar_name,
            data={'c': 'd'},
            updated_by=self.u2
        )
        self.assertEqual(ConfigAttribute.objects.count(), 3)
        url1 = reverse('configattribute-detail', kwargs={'pk': attr1.id})
        url2 = reverse('configattribute-detail', kwargs={'pk': attr2.id})
        url3 = reverse('configattribute-detail', kwargs={'pk': attr3.id})
        # Anonymous delete -> 401
        response = self.client.delete(url1, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        # First user deletes attr1 -> 204
        response = self.client.delete(url1, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(ConfigAttribute.objects.count(), 2)
        self.client.logout()
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        # Second user deletes attr2 -> 204; repeating the delete on the
        # now-missing attribute -> 404, count unchanged
        response = self.client.delete(url2, format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        response = self.client.delete(url2, format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        self.client.logout()
        # Third login is expected to lack access -> 403, nothing deleted
        ok = self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.assertTrue(ok)
        response = self.client.delete(url3, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(ConfigAttribute.objects.count(), 1)
        self.client.logout()
|
TimeExceed/aliyun-tablestore-cpp-sdk | src/tablestore/core/protocol/zero_copy_stream.cpp | <reponame>TimeExceed/aliyun-tablestore-cpp-sdk<filename>src/tablestore/core/protocol/zero_copy_stream.cpp
/*
BSD 3-Clause License
Copyright (c) 2017, Alibaba Cloud
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "zero_copy_stream.hpp"
#include "tablestore/util/move.hpp"
#include "tablestore/util/assert.hpp"
using namespace std;
using namespace aliyun::tablestore::util;
namespace aliyun {
namespace tablestore {
namespace core {
// Takes the caller's pieces by move; `a` is left in a moved-from state.
// The stream then serves those pieces one by one through Next().
MemPoolZeroCopyInputStream::MemPoolZeroCopyInputStream(deque<MemPiece>& a)
  : mReadBytes(0)
{
    moveAssign(mPieces, util::move(a));
}
// Hands out the next contiguous buffer, following the
// google::protobuf::io::ZeroCopyInputStream contract: a piece previously
// returned through BackUp() is re-delivered before any fresh piece.
// Returns false when the stream is exhausted.
bool MemPoolZeroCopyInputStream::Next(const void** data, int* size)
{
    if (!mBackupPiece.present() && mPieces.empty()) {
        return false;
    }
    if (mBackupPiece.present()) {
        // Fix: record the re-delivered piece as the last piece handed out.
        // The contract says BackUp() applies to the most recent Next();
        // the original code left mLastPiece stale in this branch, so a
        // BackUp() after consuming a backed-up piece would back up into
        // the wrong buffer.
        mLastPiece = *mBackupPiece;
        mBackupPiece = Optional<MemPiece>();
    } else {
        mLastPiece = mPieces.front();
        mPieces.pop_front();
    }
    *data = mLastPiece.data();
    *size = mLastPiece.length();
    mReadBytes += mLastPiece.length();
    return true;
}
// Per the ZeroCopyInputStream contract: return the last `count` bytes of
// the buffer handed out by the preceding Next() call; the following
// Next() will serve them again.
void MemPoolZeroCopyInputStream::BackUp(int count)
{
    OTS_ASSERT(count <= mLastPiece.length())
        (count)
        (mLastPiece.length());
    // Stash the unread tail; Next() checks mBackupPiece before mPieces.
    MemPiece p = mLastPiece.subpiece(mLastPiece.length() - count, count);
    mBackupPiece.reset(util::move(p));
    mReadBytes -= count;
}
// Advance the stream past `count` bytes by repeatedly pulling buffers;
// any surplus taken from the final buffer is handed back via BackUp().
// Returns false if the stream ends before `count` bytes were skipped.
bool MemPoolZeroCopyInputStream::Skip(int count)
{
    while (count > 0) {
        const void* chunk = NULL;
        int chunkLen = 0;
        if (!Next(&chunk, &chunkLen)) {
            return false;
        }
        if (chunkLen > count) {
            // Took more than needed: return the unread tail.
            BackUp(chunkLen - count);
            count = 0;
        } else {
            count -= chunkLen;
        }
    }
    return true;
}
// Total bytes handed out by Next() so far, minus bytes returned through
// BackUp().
int64_t MemPoolZeroCopyInputStream::ByteCount() const
{
    return mReadBytes;
}
// Writes into fixed-size blocks borrowed from `mpool` on demand; blocks
// are returned in the destructor. `mpool` is presumably required to
// outlive this stream — TODO(review) confirm against MemPool ownership.
MemPoolZeroCopyOutputStream::MemPoolZeroCopyOutputStream(
    MemPool* mpool)
  : mMemPool(mpool),
    mByteCount(0)
{}
// Return every borrowed block to its pool, most recently borrowed first.
MemPoolZeroCopyOutputStream::~MemPoolZeroCopyOutputStream()
{
    while (!mBlocks.empty()) {
        MemPool::BlockHolder holder(mBlocks.back());
        mBlocks.pop_back();
        holder.giveBack();
    }
}
// Hands out a fresh writable block. The previously handed-out block is
// only recorded as fully used at this point: its whole piece is appended
// to mPieces and added to the byte count.
// NOTE(review): calling Next() after BackUp() would append the last
// block's full piece again, duplicating the backed-up tail — this
// presumably assumes BackUp() is only called once, at the end of
// serialization; confirm against callers.
bool MemPoolZeroCopyOutputStream::Next(void** data, int* size)
{
    if (!mBlocks.empty()) {
        mPieces.push_back(mBlocks.back()->piece());
        mByteCount += mPieces.back().length();
    }
    mBlocks.push_back(mMemPool->borrow());
    MutableMemPiece mmp = mBlocks.back()->mutablePiece();
    *data = mmp.begin();
    *size = mmp.length();
    return true;
}
// The caller used all but the trailing `count` bytes of the buffer from
// the preceding Next(); record only the used prefix of that block.
// NOTE(review): the block itself stays in mBlocks, so a further Next()
// or BackUp() would record (part of) its bytes again — this appears to
// assume BackUp() is called at most once, immediately after the final
// Next(); confirm against callers. `count` is not range-checked here.
void MemPoolZeroCopyOutputStream::BackUp(int count)
{
    OTS_ASSERT(!mBlocks.empty());
    MemPiece mp = mBlocks.back()->piece();
    MemPiece realpiece = mp.subpiece(0, mp.length() - count);
    mPieces.push_back(realpiece);
    mByteCount += realpiece.length();
}
// Bytes recorded so far. Note: bytes written into the block currently
// handed out by Next() are not counted until the next Next()/BackUp().
int64_t MemPoolZeroCopyOutputStream::ByteCount() const
{
    return mByteCount;
}
// Move the finished pieces out to the caller; mPieces is emptied. The
// pieces still point into blocks owned by this stream, so they must be
// consumed before the stream is destroyed.
MoveHolder<deque<MemPiece> > MemPoolZeroCopyOutputStream::pieces()
{
    return util::move(mPieces);
}
} // namespace core
} // namespace tablestore
} // namespace aliyun
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.