repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
linklab/link_rl | common/fast_rl/policy_based_model.py | <reponame>linklab/link_rl
import glob
import math
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
from common.fast_rl import rl_agent
def init_weights(m):
    """Kaiming-normal initialise the weights of Linear / Conv2d modules.

    Designed to be passed to ``nn.Module.apply``; modules of any other
    exact type are left untouched (subclasses intentionally excluded,
    matching the original exact-type comparison).
    """
    if type(m) in (nn.Linear, nn.Conv2d):
        torch.nn.init.kaiming_normal_(m.weight)
class A2CMLP(nn.Module):
    """Two-headed MLP for discrete-action A2C.

    A shared trunk feeds a policy head (raw action logits) and a value
    head (scalar state-value estimate).
    """

    def __init__(self, obs_size, hidden_size_1, hidden_size_2, n_actions):
        super(A2CMLP, self).__init__()
        self.__name__ = "A2CMLP"
        # Shared feature extractor.
        self.net = nn.Sequential(
            nn.Linear(obs_size, hidden_size_1),
            nn.ReLU(),
            nn.Linear(hidden_size_1, hidden_size_2),
            nn.ReLU()
        )
        # Policy head: unnormalised logits, one per discrete action.
        self.policy = nn.Sequential(
            nn.Linear(hidden_size_2, 128),
            nn.ReLU(),
            nn.Linear(128, n_actions)
        )
        # Value head: scalar V(s).
        self.value = nn.Sequential(
            nn.Linear(hidden_size_2, 128),
            nn.ReLU(),
            nn.Linear(128, 1)
        )

    def init_weights(self, m):
        """Kaiming-normal initialiser, usable with ``nn.Module.apply``."""
        if type(m) in (nn.Linear, nn.Conv2d):
            torch.nn.init.kaiming_normal_(m.weight)

    def forward(self, x):
        """Return ``(policy_logits, state_value)`` for a batch of observations."""
        # Accept raw lists/arrays as well as tensors; compute in float32.
        x = x.to(torch.float32) if torch.is_tensor(x) else torch.tensor(x, dtype=torch.float32)
        features = self.net(x)
        return self.policy(features), self.value(features)
class ContinuousA2CMLP(nn.Module):
    """A2C network for continuous actions.

    A shared trunk feeds a Gaussian policy (mean squashed by tanh,
    variance made positive by softplus) and a scalar value head.
    """

    def __init__(self, obs_size, hidden_size_1, hidden_size_2, n_actions):
        super(ContinuousA2CMLP, self).__init__()
        self.__name__ = "ContinuousA2CMLP"
        # Shared feature extractor.
        self.net = nn.Sequential(
            nn.Linear(obs_size, hidden_size_1),
            nn.ReLU(),
            nn.Linear(hidden_size_1, hidden_size_2),
            nn.ReLU()
        )
        # Mean head: tanh keeps the mean inside [-1, 1].
        self.mu = nn.Sequential(
            nn.Linear(hidden_size_2, 64),
            nn.ReLU(),
            nn.Linear(64, n_actions),
            nn.Tanh()
        )
        # Variance head: softplus guarantees a strictly positive variance.
        self.var = nn.Sequential(
            nn.Linear(hidden_size_2, 64),
            nn.ReLU(),
            nn.Linear(64, n_actions),
            nn.Softplus(),
        )
        # Value head: scalar V(s).
        self.value = nn.Sequential(
            nn.Linear(hidden_size_2, 64),
            nn.ReLU(),
            nn.Linear(64, 1)
        )

    def forward(self, x):
        """Return ``(mu, var, value)`` for a batch of observations."""
        features = self.net(x)
        return self.mu(features), self.var(features), self.value(features)
#####################################
##   DDPGGruAttention: Begin      ##
#####################################
class GruEncoder(nn.Module):
    """Thin wrapper around a batch-first ``nn.GRU``.

    When ``bidirectional`` is True, each output step carries
    ``2 * hidden_dim`` features.
    """

    def __init__(self, embedding_dim, hidden_dim, n_layers=1, dropout=0., bidirectional=True):
        super(GruEncoder, self).__init__()
        # Exposed so downstream modules can size their layers accordingly.
        self.bidirectional = bidirectional
        self.rnn = nn.GRU(
            input_size=embedding_dim,
            hidden_size=hidden_dim,
            num_layers=n_layers,
            batch_first=True,
            dropout=dropout,
            bidirectional=bidirectional,
        )

    def forward(self, input, hidden=None):
        """Run the GRU; returns ``(outputs, last_hidden)`` exactly as ``nn.GRU`` does."""
        return self.rnn(input, hidden)
class NullEmbedding(nn.Module):
    """Identity placeholder used where an embedding layer is expected."""

    def __init__(self):
        super(NullEmbedding, self).__init__()

    def forward(self, input):
        # Pass the input through unchanged.
        return input
class Attention(nn.Module):
    """Scaled dot-product attention over a batch of key/value sequences.

    Shapes: query [B, H], keys/values [B, S, H].  Returns the softmax
    weights (unsqueezed to [B, 1, 1, S]) and the attended vector [B, H].
    Assumes query_dim == key_dim (plain dot-product attention).
    """

    def __init__(self, query_dim, key_dim, value_dim):
        super(Attention, self).__init__()
        # 1/sqrt(d) scaling of the raw dot products.
        self.scale = 1. / math.sqrt(query_dim)

    def forward(self, query, keys, values):
        q = query.unsqueeze(1)                             # [B, H]    -> [B, 1, H]
        k = keys.transpose(1, 2)                           # [B, S, H] -> [B, H, S]
        scores = torch.bmm(q, k)                           # [B, 1, S]
        # Scale in place, then normalise across the sequence dimension.
        weights = F.softmax(scores.mul_(self.scale), dim=2)
        # Weighted sum of the values: [B, 1, S] x [B, S, H] -> [B, H].
        attended = torch.bmm(weights, values).squeeze(1)
        return weights.unsqueeze(1), attended
class SelfAttentionRNNRegressor(nn.Module):
    """RNN encoder + attention pooling + linear head.

    ``embedding`` maps raw input to encoder features, ``encoder`` is an RNN
    returning ``(outputs, hidden)``, and ``attention`` pools the encoder
    outputs using the last layer's hidden state as the query.
    """

    def __init__(self, embedding, encoder, attention, hidden_dim, n_actions):
        super(SelfAttentionRNNRegressor, self).__init__()
        self.embedding = embedding
        self.encoder = encoder
        self.attention = attention
        self.dense_decoder = nn.Linear(hidden_dim, n_actions)  # Dense output head
        # Report the total number of learnable parameters at construction time.
        size = 0
        for p in self.parameters():
            size += p.nelement()
        print('Total param size: {}'.format(size))

    def forward(self, input):
        outputs, hidden = self.encoder(self.embedding(input))
        # output: [32, 4, 128] or [1, 4, 128]
        # len(hidden): n_layers
        # hidden: [2, 32, 128] or [2, 1, 128] --> [n_layers, batch_size, hidden_size]
        hidden = hidden[-1]  # take the last layer's cell state
        # hidden: [32, 128] or [1, 128]
        # TODO: assumes bidirectional == False; extend later to handle True.
        # if self.encoder.bidirectional:  # need to concat the last 2 hidden layers
        #     hidden = torch.cat([hidden[-1], hidden[-2]], dim=1)
        # Attention uses the final hidden state as query over all step outputs.
        episode_reward, attention_value = self.attention(hidden, outputs, outputs)  # Q, K, V
        pred_value = self.dense_decoder(attention_value)  # [B, n_actions]
        return pred_value, episode_reward
class DDPGGruAttentionActor(nn.Module):
    """DDPG actor: GRU encoder with self-attention over the observation
    sequence, producing tanh-squashed actions rescaled by ``scale``."""

    def __init__(self, obs_size, hidden_size, n_actions, bidirectional, scale):
        super(DDPGGruAttentionActor, self).__init__()
        # NOTE(review): the name says "Lstm" although the encoder is a GRU --
        # kept as-is in case saved-model names depend on it; confirm before renaming.
        self.__name__ = "DDPGLstmAttentionActor"
        encoder = GruEncoder(
            embedding_dim=obs_size,
            hidden_dim=hidden_size,
            n_layers=2,
            dropout=0.0,
            bidirectional=bidirectional
        )
        embedding = NullEmbedding()
        # Bidirectional GRUs emit 2 * hidden features per step.
        attention_dim = hidden_size * 2 if bidirectional else hidden_size
        attention = Attention(attention_dim, attention_dim, attention_dim)  # Query, Key, Value
        self.net = SelfAttentionRNNRegressor(embedding, encoder, attention, attention_dim, n_actions=n_actions)
        self.net.apply(init_weights)
        self.scale = scale

    def forward(self, x):
        # Regressor returns (prediction, attention weights); weights unused here.
        n, _ = self.net(x)
        t = torch.tanh(n)
        # Squash to [-1, 1], then rescale to the environment's action range.
        return t * self.scale
class DDPGGruAttentionCritic(nn.Module):
    """DDPG critic: GRU+attention observation encoder whose scalar summary is
    concatenated with the action and scored by an MLP to give Q(s, a)."""

    def __init__(self, obs_size, hidden_size_1, hidden_size_2, n_actions, bidirectional):
        super(DDPGGruAttentionCritic, self).__init__()
        # NOTE(review): the name says "Lstm" although the encoder is a GRU --
        # kept as-is in case anything keys off this string; confirm before renaming.
        self.__name__ = "DDPGLstmAttentionCritic"
        encoder = GruEncoder(
            embedding_dim=obs_size,
            hidden_dim=hidden_size_1,
            n_layers=2,
            dropout=0.0,
            bidirectional=bidirectional
        )
        embedding = NullEmbedding()
        # Bidirectional GRUs emit 2 * hidden features per step.
        attention_dim = hidden_size_1 * 2 if bidirectional else hidden_size_1
        attention = Attention(attention_dim, attention_dim, attention_dim)
        # Observation branch regresses a single scalar summary of the state.
        self.obs_net = SelfAttentionRNNRegressor(embedding, encoder, attention, attention_dim, n_actions=1)
        self.out_net = nn.Sequential(
            nn.Linear(1 + n_actions, hidden_size_2),
            nn.ReLU(),
            nn.Linear(hidden_size_2, 1)
        )
        self.obs_net.apply(init_weights)
        self.out_net.apply(init_weights)

    def forward(self, x, a):
        # The attention weights (second output) are not needed for the Q-value.
        obs, _ = self.obs_net(x)
        return self.out_net(torch.cat([obs, a], dim=1))
#####################################
##   DDPGGruAttention: End        ##
#####################################
#####################################
##        DDPGGru: Start          ##
#####################################
class DDPGGruActor(nn.Module):
    """DDPG actor: a 2-layer GRU over the observation sequence; the final
    timestep's features are mapped to tanh-squashed, scaled actions."""

    def __init__(self, obs_size, hidden_size_1, hidden_size_2, n_actions, bidirectional, scale):
        super(DDPGGruActor, self).__init__()
        self.__name__ = "DDPGGruActor"
        self.net = GruEncoder(
            embedding_dim=obs_size,
            hidden_dim=hidden_size_1,
            n_layers=2,
            dropout=0.0,
            bidirectional=bidirectional
        )
        # NOTE(review): with bidirectional=True the GRU emits 2*hidden_size_1
        # features per step, which would not match this first Linear --
        # presumably only used with bidirectional=False; confirm with callers.
        self.action_net = nn.Sequential(
            nn.Linear(hidden_size_1, hidden_size_2),
            nn.ReLU(),
            nn.Linear(hidden_size_2, n_actions),
        )
        self.net.apply(init_weights)
        self.action_net.apply(init_weights)
        self.scale = scale

    def forward(self, x):
        num_state_batch = x.shape[0]
        n_1, _ = self.net(x)
        # Use only the final timestep's output; the 0:num_state_batch slice
        # spans the whole batch dimension (effectively a no-op).
        n_2 = self.action_net(n_1[0:num_state_batch, -1, :])
        t = torch.tanh(n_2)
        # Squash to [-1, 1], then rescale to the environment's action range.
        return t * self.scale
class DDPGGruCritic(nn.Module):
    """DDPG critic: a 2-layer GRU encodes the observation sequence; the last
    timestep's features plus the action are scored by an MLP as Q(s, a)."""

    def __init__(self, obs_size, hidden_size_1, hidden_size_2, n_actions, bidirectional):
        super(DDPGGruCritic, self).__init__()
        self.__name__ = "DDPGGruCritic"
        self.obs_net = GruEncoder(
            embedding_dim=obs_size,
            hidden_dim=hidden_size_1,
            n_layers=2,
            dropout=0.0,
            bidirectional=bidirectional
        )
        # NOTE(review): with bidirectional=True the GRU emits 2*hidden_size_1
        # features per step, which would not match this first Linear --
        # presumably only used with bidirectional=False; confirm with callers.
        self.out_net = nn.Sequential(
            nn.Linear(hidden_size_1 + n_actions, hidden_size_2),
            nn.ReLU(),
            nn.Linear(hidden_size_2, 1)
        )
        self.obs_net.apply(init_weights)
        self.out_net.apply(init_weights)

    def forward(self, x, a):
        num_action_batch = a.shape[0]
        obs, _ = self.obs_net(x)
        # Keep only the final timestep's features for each sequence.
        obs = obs[0:num_action_batch, -1, :]
        # NOTE(review): squeeze(dim=0) only changes the shape when the batch
        # size is 1 -- confirm this asymmetry is intended by the callers.
        return self.out_net(torch.cat([obs, a], dim=1)).squeeze(dim=0)
#####################################
##         DDPGGru: End           ##
#####################################
class DDPGActor(nn.Module):
    """Deterministic DDPG actor: a BatchNorm MLP whose tanh output is
    rescaled by ``scale`` to the environment's action range."""

    def __init__(self, obs_size, hidden_size_1, hidden_size_2, n_actions, scale):
        super(DDPGActor, self).__init__()
        self.__name__ = "DDPGActor"
        # Three hidden layers, each followed by BatchNorm + ReLU.
        self.net = nn.Sequential(
            nn.Linear(obs_size, hidden_size_1),
            nn.BatchNorm1d(num_features=hidden_size_1),
            nn.ReLU(),
            nn.Linear(hidden_size_1, hidden_size_2),
            nn.BatchNorm1d(num_features=hidden_size_2),
            nn.ReLU(),
            nn.Linear(hidden_size_2, hidden_size_2),
            nn.BatchNorm1d(num_features=hidden_size_2),
            nn.ReLU(),
            nn.Linear(hidden_size_2, n_actions),
        )
        self.net.apply(init_weights)
        self.scale = scale

    def forward(self, x):
        """Map a batch of observations to actions in [-scale, scale]."""
        return torch.tanh(self.net(x)) * self.scale
class DDPGCritic(nn.Module):
    """DDPG critic: encodes the observation with an MLP, then scores the
    (features, action) concatenation with a second MLP to give Q(s, a)."""

    def __init__(self, obs_size, hidden_size_1, hidden_size_2, n_actions):
        super(DDPGCritic, self).__init__()
        self.__name__ = "DDPGCritic"
        # Observation branch.
        self.obs_net = nn.Sequential(
            nn.Linear(obs_size, hidden_size_1),
            nn.ReLU(),
            nn.Linear(hidden_size_1, hidden_size_2),
            nn.ReLU(),
        )
        # Joint (features + action) branch producing the scalar Q-value.
        self.out_net = nn.Sequential(
            nn.Linear(hidden_size_2 + n_actions, hidden_size_2),
            nn.ReLU(),
            nn.Linear(hidden_size_2, hidden_size_2),
            nn.ReLU(),
            nn.Linear(hidden_size_2, 1)
        )
        self.obs_net.apply(init_weights)
        self.out_net.apply(init_weights)

    def forward(self, x, a):
        """Return Q(x, a) as a [batch, 1] tensor."""
        state_features = self.obs_net(x)
        return self.out_net(torch.cat([state_features, a], dim=1))
class D4PGCritic(nn.Module):
    """Distributional (categorical) critic for D4PG.

    Instead of a scalar Q-value the critic outputs ``n_atoms`` logits over a
    fixed support of returns spanning ``[v_min, v_max]``.
    """

    def __init__(self, obs_size, hidden_size_1, hidden_size_2, n_actions, v_min, v_max, n_atoms):
        super(D4PGCritic, self).__init__()
        # Fixed: was mislabelled "DDPGCritic" (copy-paste from DDPGCritic).
        self.__name__ = "D4PGCritic"
        self.obs_net = nn.Sequential(
            nn.Linear(obs_size, hidden_size_1),
            nn.ReLU(),
        )
        self.out_net = nn.Sequential(
            nn.Linear(hidden_size_1 + n_actions, hidden_size_2),
            nn.ReLU(),
            nn.Linear(hidden_size_2, n_atoms)
        )
        # Evenly spaced return atoms; a buffer so it follows the module across
        # devices and is included in state_dict.
        delta = (v_max - v_min) / (n_atoms - 1)
        self.register_buffer("supports", torch.arange(v_min, v_max + delta, delta))
        self.obs_net.apply(self._init_weights)
        self.out_net.apply(self._init_weights)

    @staticmethod
    def _init_weights(m):
        # Same Kaiming-normal scheme as the module-level init_weights().
        if type(m) in (nn.Linear, nn.Conv2d):
            torch.nn.init.kaiming_normal_(m.weight)

    def forward(self, x, a):
        """Return the atom logits of the Q(x, a) distribution, [batch, n_atoms]."""
        obs = self.obs_net(x)
        return self.out_net(torch.cat([obs, a], dim=1))

    def distribution_to_q_value(self, distribution):
        """Collapse atom logits into the expected Q-value, shape [batch, 1]."""
        weights = F.softmax(distribution, dim=1) * self.supports
        res = weights.sum(dim=1)
        return res.unsqueeze(dim=-1)
def unpack_batch_for_a2c(batch, net, params, device='cpu'):
    """Convert a batch of n-step transitions into A2C training tensors.

    :param batch: iterable of experiences with ``.state``, ``.action``,
        ``.reward`` and ``.last_state`` (None if the episode terminated
        within the n steps).
    :param net: actor-critic network; ``net.base.forward_critic`` is used to
        bootstrap the value of non-terminal last states.
    :param params: hyper-parameters providing ``GAMMA`` and ``N_STEP``.
    :param device: torch device string for the returned tensors.
    :return: (states, actions, target action values) tensors.
    """
    states, actions, rewards, not_done_idx, last_states = [], [], [], [], []
    for idx, exp in enumerate(batch):
        # np.asarray avoids a copy when possible; unlike
        # np.array(..., copy=False) it does not raise under NumPy >= 2.0.
        states.append(np.asarray(exp.state))
        actions.append(int(exp.action))
        rewards.append(exp.reward)
        if exp.last_state is not None:
            # Non-terminal transition: remember where to bootstrap below.
            not_done_idx.append(idx)
            last_states.append(np.asarray(exp.last_state))
    states_v = torch.FloatTensor(np.asarray(states)).to(device)
    actions_v = torch.LongTensor(actions).to(device)
    # Accumulated n-step rewards; bootstrapped below for non-terminal entries.
    rewards_np = np.array(rewards, dtype=np.float32)
    if not_done_idx:
        # n-step bootstrap: add gamma^N * V(last_state) to the rewards.
        last_states_v = torch.FloatTensor(np.asarray(last_states)).to(device)
        last_values_v = net.base.forward_critic(last_states_v)
        last_values_np = last_values_v.data.cpu().numpy()[:, 0] * params.GAMMA ** params.N_STEP
        rewards_np[not_done_idx] += last_values_np
    target_action_values_v = torch.FloatTensor(rewards_np).to(device)
    return states_v, actions_v, target_action_values_v
def unpack_batch_for_ddpg(batch, device="cpu"):
    """Convert a batch of transitions into DDPG training tensors.

    Terminal transitions have no last_state; the current state is used as a
    placeholder there because its bootstrapped value is masked out later.

    :param batch: iterable of experiences with ``.state``, ``.action``,
        ``.reward`` and ``.last_state`` (None for terminal transitions).
    :param device: torch device string for the returned tensors.
    :return: (states, actions, rewards, dones mask, last states) tensors.
    """
    states, actions, rewards, dones, last_states = [], [], [], [], []
    for exp in batch:
        # np.asarray avoids a copy when possible; unlike
        # np.array(..., copy=False) it does not raise under NumPy >= 2.0.
        states.append(np.asarray(exp.state))
        actions.append(exp.action)
        rewards.append(exp.reward)
        dones.append(exp.last_state is None)
        if exp.last_state is None:
            last_states.append(exp.state)  # the result will be masked anyway
        else:
            last_states.append(np.asarray(exp.last_state))
    states_v = rl_agent.float32_preprocessor(states).to(device)
    actions_v = rl_agent.float32_preprocessor(actions).to(device)
    rewards_v = rl_agent.float32_preprocessor(rewards).to(device)
    last_states_v = rl_agent.float32_preprocessor(last_states).to(device)
    dones_t = torch.BoolTensor(dones).to(device)
    return states_v, actions_v, rewards_v, dones_t, last_states_v
def distr_projection(next_distr_v, rewards_v, dones_mask_t, gamma, device="cpu",
                     n_atoms=None, v_min=None, v_max=None, delta_z=None):
    """Project a shifted/scaled categorical value distribution back onto the
    fixed support (the Bellman projection used by C51 / D4PG).

    :param next_distr_v: [batch, n_atoms] next-state atom probabilities.
    :param rewards_v: [batch] immediate (n-step) rewards.
    :param dones_mask_t: [batch] bool mask of terminal transitions.
    :param gamma: discount applied to the support atoms.
    :param device: torch device string for the returned tensor.
    :param n_atoms, v_min, v_max, delta_z: support description; each defaults
        to the corresponding module-level constant (N_ATOMS / Vmin / Vmax /
        DELTA_Z) for backward compatibility with existing callers.
    :return: [batch, n_atoms] projected probabilities as a FloatTensor.
    """
    # Fall back to the legacy module-level constants when not supplied.
    if n_atoms is None:
        n_atoms = N_ATOMS
    if v_min is None:
        v_min = Vmin
    if v_max is None:
        v_max = Vmax
    if delta_z is None:
        delta_z = DELTA_Z
    next_distr = next_distr_v.data.cpu().numpy()
    rewards = rewards_v.data.cpu().numpy()
    # np.bool was removed in NumPy 1.24 -- use the builtin bool instead.
    dones_mask = dones_mask_t.cpu().numpy().astype(bool)
    batch_size = len(rewards)
    proj_distr = np.zeros((batch_size, n_atoms), dtype=np.float32)
    for atom in range(n_atoms):
        # Bellman-update each support atom and clip back into [v_min, v_max].
        tz_j = np.minimum(v_max, np.maximum(
            v_min, rewards + (v_min + atom * delta_z) * gamma))
        b_j = (tz_j - v_min) / delta_z
        l = np.floor(b_j).astype(np.int64)
        u = np.ceil(b_j).astype(np.int64)
        # Atom lands exactly on a support point: all its mass goes there.
        eq_mask = u == l
        proj_distr[eq_mask, l[eq_mask]] += \
            next_distr[eq_mask, atom]
        # Otherwise split the mass between the two neighbouring atoms,
        # proportionally to the distance from each.
        ne_mask = u != l
        proj_distr[ne_mask, l[ne_mask]] += \
            next_distr[ne_mask, atom] * (u - b_j)[ne_mask]
        proj_distr[ne_mask, u[ne_mask]] += \
            next_distr[ne_mask, atom] * (b_j - l)[ne_mask]
    if dones_mask.any():
        # Terminal transitions: the distribution collapses onto the clipped
        # immediate reward (no bootstrapped future atoms).
        proj_distr[dones_mask] = 0.0
        tz_j = np.minimum(v_max, np.maximum(
            v_min, rewards[dones_mask]))
        b_j = (tz_j - v_min) / delta_z
        l = np.floor(b_j).astype(np.int64)
        u = np.ceil(b_j).astype(np.int64)
        eq_mask = u == l
        eq_dones = dones_mask.copy()
        eq_dones[dones_mask] = eq_mask
        if eq_dones.any():
            proj_distr[eq_dones, l[eq_mask]] = 1.0
        ne_mask = u != l
        ne_dones = dones_mask.copy()
        ne_dones[dones_mask] = ne_mask
        if ne_dones.any():
            proj_distr[ne_dones, l[ne_mask]] = (u - b_j)[ne_mask]
            proj_distr[ne_dones, u[ne_mask]] = (b_j - l)[ne_mask]
    return torch.FloatTensor(proj_distr).to(device)
mghgroup/Glide-Browser | chrome/android/javatests/src/org/chromium/chrome/browser/payments/PaymentRequestBillingAddressTest.java | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.payments;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.endsWith;
import static org.chromium.chrome.browser.payments.PaymentRequestTestRule.DECEMBER;
import static org.chromium.chrome.browser.payments.PaymentRequestTestRule.FIRST_BILLING_ADDRESS;
import static org.chromium.chrome.browser.payments.PaymentRequestTestRule.NEXT_YEAR;
import androidx.test.filters.MediumTest;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Feature;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.autofill.AutofillTestHelper;
import org.chromium.chrome.browser.autofill.PersonalDataManager.AutofillProfile;
import org.chromium.chrome.browser.autofill.PersonalDataManager.CreditCard;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.browser.payments.PaymentRequestTestRule.MainActivityStartCallback;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import org.chromium.ui.test.util.DisableAnimationsTestRule;
import java.util.concurrent.TimeoutException;
/**
 * A payment integration test for billing addresses.
*/
@RunWith(ChromeJUnit4ClassRunner.class)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
public class PaymentRequestBillingAddressTest implements MainActivityStartCallback {
// Disable animations to reduce flakiness.
@ClassRule
public static DisableAnimationsTestRule sNoAnimationsRule = new DisableAnimationsTestRule();
@Rule
public PaymentRequestTestRule mPaymentRequestTestRule =
new PaymentRequestTestRule("payment_request_free_shipping_test.html", this);
/**
* The index at which the option to add a billing address is located in the billing address
* selection dropdown.
*/
private static final int ADD_BILLING_ADDRESS = 8;
/** The index of the billing address dropdown in the card editor. */
private static final int BILLING_ADDRESS_DROPDOWN_INDEX = 2;
    /**
     * Populates the autofill database: one complete profile + card, several incomplete
     * profiles, a card with no billing address, and use stats that make the frecency
     * ordering of the billing-address dropdown deterministic for the tests below.
     */
    @Override
    public void onMainActivityStarted() throws TimeoutException {
        AutofillTestHelper helper = new AutofillTestHelper();
        // Complete profile backing the first (complete) credit card.
        String profile1 = helper.setProfile(new AutofillProfile("", "https://example.com", true,
                "<NAME>", "Google", "340 Main St", "CA", "Los Angeles", "", "90291", "", "US",
                "650-253-0000", "<EMAIL>", "en-US"));
        helper.setCreditCard(new CreditCard("", "https://example.com", true, true, "<NAME>",
                "4111111111111111", "1111", "12", "2050", "amex", R.drawable.amex_card, profile1,
                "" /* serverId */));
        String profile2 = helper.setProfile(new AutofillProfile("", "https://example.com", true,
                "<NAME>", "Google", "340 Main St", "CA", "Los Angeles", "", "90291", "", "US",
                "650-253-0000", "<EMAIL>", "en-US"));
        String profile3 = helper.setProfile(new AutofillProfile("", "https://example.com", true,
                "<NAME>", "Google", "340 Main St", "CA", "Los Angeles", "", "90291", "", "US",
                "650-253-0000", "<EMAIL>", "en-US"));
        // Incomplete profile (invalid address).
        String profile4 = helper.setProfile(new AutofillProfile("", "https://example.com", true,
                "<NAME>", "Google", "340 Main St", "CA", "", "", "90291", "", "US",
                "650-253-0000", "<EMAIL>", "en-US"));
        // Incomplete profile (missing phone number)
        String profile5 = helper.setProfile(new AutofillProfile("", "https://example.com", true,
                "<NAME>", "Google", "340 Main St", "CA", "Los Angeles", "", "90291", "", "US", "",
                "<EMAIL>", "en-US"));
        // Incomplete profile (missing recipient name).
        String profile6 = helper.setProfile(new AutofillProfile("", "https://example.com", true, "",
                "Google", "340 Main St", "CA", "Los Angeles", "", "90291", "", "US", "650-253-0000",
                "<EMAIL>", "en-US"));
        // Incomplete profile (need more information).
        String profile7 = helper.setProfile(new AutofillProfile("", "https://example.com", true, "",
                "Google", "340 Main St", "CA", "", "", "90291", "", "US", "", "", "en-US"));
        // Profile with empty street address (should not be presented to user).
        String profile8 = helper.setProfile(new AutofillProfile("", "https://example.com", true,
                "<NAME>", "Google", "" /* streetAddress */, "CA", "Los Angeles", "", "90291", "",
                "US", "650-253-0000", "<EMAIL>", "en-US"));
        // This card has no billing address selected.
        helper.setCreditCard(new CreditCard("", "https://example.com", true, true, "<NAME>",
                "4242424242424242", "1111", "12", "2050", "amex", R.drawable.amex_card, profile6,
                "" /* serverId */));
        // Assign use stats so that incomplete profiles have the highest frecency, profile2 has the
        // highest frecency and profile3 has the lowest among the complete profiles, and profile8
        // has the highest frecency and profile4 has the lowest among the incomplete profiles.
        helper.setProfileUseStatsForTesting(profile1, 5, 5);
        helper.setProfileUseStatsForTesting(profile2, 10, 10);
        helper.setProfileUseStatsForTesting(profile3, 1, 1);
        helper.setProfileUseStatsForTesting(profile4, 15, 15);
        helper.setProfileUseStatsForTesting(profile5, 30, 30);
        helper.setProfileUseStatsForTesting(profile6, 25, 25);
        helper.setProfileUseStatsForTesting(profile7, 20, 20);
        helper.setProfileUseStatsForTesting(profile8, 40, 40);
    }
    /** Verifies the format of the billing address suggestions when adding a new credit card. */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testNewCardBillingAddressFormat() throws TimeoutException {
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        // Open the payment methods section and start adding a new card.
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_add_option_button, mPaymentRequestTestRule.getReadyToEdit());
        // Fill in the card number and name, then pick the first billing address.
        mPaymentRequestTestRule.setTextInCardEditorAndWait(
                new String[] {"5454-5454-5454-5454", "Bob"},
                mPaymentRequestTestRule.getEditorTextUpdate());
        mPaymentRequestTestRule.setSpinnerSelectionsInCardEditorAndWait(
                new int[] {DECEMBER, NEXT_YEAR, FIRST_BILLING_ADDRESS},
                mPaymentRequestTestRule.getBillingAddressChangeProcessed());
        // The billing address suggestions should include only the name, address, city, state and
        // zip code of the profile.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
    }
    /**
     * Verifies that the correct number of billing address suggestions are shown when adding a new
     * credit card.
     */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testNumberOfBillingAddressSuggestions() throws TimeoutException {
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        // Open the payment methods section and start adding a new card.
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_add_option_button, mPaymentRequestTestRule.getReadyToEdit());
        // There should only be 9 suggestions, the 7 saved addresses, the select hint and the
        // option to add a new address.
        Assert.assertEquals(9,
                mPaymentRequestTestRule.getSpinnerItemCountInCardEditor(
                        BILLING_ADDRESS_DROPDOWN_INDEX));
    }
    /**
     * Verifies that the correct number of billing address suggestions are shown when adding a new
     * credit card, even after cancelling out of adding a new billing address.
     */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testNumberOfBillingAddressSuggestions_AfterCancellingNewBillingAddress()
            throws TimeoutException {
        // Add a payment method and add a new billing address.
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_add_option_button, mPaymentRequestTestRule.getReadyToEdit());
        // Select the "+ ADD ADDRESS" option for the billing address.
        mPaymentRequestTestRule.setSpinnerSelectionsInCardEditorAndWait(
                new int[] {DECEMBER, NEXT_YEAR, ADD_BILLING_ADDRESS},
                mPaymentRequestTestRule.getReadyToEdit());
        // Cancel the creation of a new billing address.
        mPaymentRequestTestRule.clickInEditorAndWait(
                R.id.payments_edit_cancel_button, mPaymentRequestTestRule.getReadyToEdit());
        // There should still only be 9 suggestions, the 7 saved addresses, the select hint and
        // the option to add a new address; cancelling must not leave a phantom entry behind.
        Assert.assertEquals(9,
                mPaymentRequestTestRule.getSpinnerItemCountInCardEditor(
                        BILLING_ADDRESS_DROPDOWN_INDEX));
    }
    /**
     * Tests that for a card that already has a billing address, adding a new one and cancelling
     * maintains the previous selection.
     */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testAddBillingAddressOnCardAndCancel_MaintainsPreviousSelection()
            throws TimeoutException {
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        // Edit the only card.
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_open_editor_pencil_button, mPaymentRequestTestRule.getReadyToEdit());
        // <NAME> is selected as the billing address.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        // Select the "+ ADD ADDRESS" option for the billing address.
        mPaymentRequestTestRule.setSpinnerSelectionsInCardEditorAndWait(
                new int[] {DECEMBER, NEXT_YEAR, ADD_BILLING_ADDRESS},
                mPaymentRequestTestRule.getReadyToEdit());
        // Cancel the creation of a new billing address.
        mPaymentRequestTestRule.clickInEditorAndWait(
                R.id.payments_edit_cancel_button, mPaymentRequestTestRule.getReadyToEdit());
        // <NAME> is STILL selected as the billing address.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
    }
    /**
     * Tests that adding a billing address for a card that has none, and cancelling then returns
     * to the proper selection (Select...).
     */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testAddBillingAddressOnCardWithNoBillingAndCancel_MaintainsPreviousSelection()
            throws TimeoutException {
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        // Edit the second card.
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickOnPaymentMethodSuggestionOptionAndWait(
                1, mPaymentRequestTestRule.getReadyToEdit());
        // Now in Card Editor to add a billing address. "Select" is selected in the dropdown.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "Select");
        // Select the "+ ADD ADDRESS" option for the billing address.
        mPaymentRequestTestRule.setSpinnerSelectionsInCardEditorAndWait(
                new int[] {DECEMBER, NEXT_YEAR, ADD_BILLING_ADDRESS},
                mPaymentRequestTestRule.getReadyToEdit());
        // Cancel the creation of a new billing address.
        mPaymentRequestTestRule.clickInEditorAndWait(
                R.id.payments_edit_cancel_button, mPaymentRequestTestRule.getReadyToEdit());
        // "Select" is STILL selected as the billing address.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "Select");
    }
    /**
     * Verifies that the billing address suggestions are ordered by frecency.
     */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testBillingAddressSortedByFrecency() throws TimeoutException {
        // Add a payment method.
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_add_option_button, mPaymentRequestTestRule.getReadyToEdit());
        // There should be 9 suggestions, the 7 saved addresses, the select hint and the option to
        // add a new address.
        Assert.assertEquals(9,
                mPaymentRequestTestRule.getSpinnerItemCountInCardEditor(
                        BILLING_ADDRESS_DROPDOWN_INDEX));
        // The billing address suggestions should be ordered by frecency, with the select hint
        // first and the "add address" option last.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 0),
                "Select");
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 1),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 2),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 3),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 8),
                "Add address");
    }
    /**
     * Verifies that the billing address suggestions are ordered by frecency, except for a newly
     * created address which should be suggested first.
     */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testBillingAddressSortedByFrecency_AddNewAddress() throws TimeoutException {
        // Add a payment method.
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_add_option_button, mPaymentRequestTestRule.getReadyToEdit());
        // Add a new billing address.
        mPaymentRequestTestRule.setSpinnerSelectionsInCardEditorAndWait(
                new int[] {DECEMBER, NEXT_YEAR, ADD_BILLING_ADDRESS},
                mPaymentRequestTestRule.getReadyToEdit());
        mPaymentRequestTestRule.setTextInEditorAndWait(
                new String[] {"<NAME>", "Google", "340 Main St", "Los Angeles", "CA", "90291",
                        "650-253-0000"},
                mPaymentRequestTestRule.getEditorTextUpdate());
        mPaymentRequestTestRule.clickInEditorAndWait(
                R.id.editor_dialog_done_button, mPaymentRequestTestRule.getReadyToEdit());
        // There should be 10 suggestions, the 7 initial addresses, the newly added address, the
        // select hint and the option to add a new address.
        Assert.assertEquals(10,
                mPaymentRequestTestRule.getSpinnerItemCountInCardEditor(
                        BILLING_ADDRESS_DROPDOWN_INDEX));
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 0),
                "Select");
        // The first address suggestion should be the newly added address.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 1),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        // The rest of the billing address suggestions should be ordered by frecency.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 2),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 3),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 4),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, 9),
                "Add address");
    }
    /**
     * Verifies that a newly created shipping address is offered as the first billing address
     * suggestion.
     */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testNewShippingAddressSuggestedFirst() throws TimeoutException {
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        // Add a shipping address.
        mPaymentRequestTestRule.clickInShippingAddressAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInShippingAddressAndWait(
                R.id.payments_add_option_button, mPaymentRequestTestRule.getReadyToEdit());
        mPaymentRequestTestRule.setTextInEditorAndWait(
                new String[] {"<NAME>", "Google", "340 Main St", "Los Angeles", "CA", "90291",
                        "650-253-0000"},
                mPaymentRequestTestRule.getEditorTextUpdate());
        mPaymentRequestTestRule.clickInEditorAndWait(
                R.id.editor_dialog_done_button, mPaymentRequestTestRule.getReadyToPay());
        // Navigate to the card editor UI.
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_add_option_button, mPaymentRequestTestRule.getReadyToEdit());
        // There should be 10 suggestions, the 7 initial addresses, the newly added address, the
        // select hint and the option to add a new address.
        Assert.assertEquals(10,
                mPaymentRequestTestRule.getSpinnerItemCountInCardEditor(
                        BILLING_ADDRESS_DROPDOWN_INDEX));
        // The new address must be put at the top of the dropdown list, right after the
        // select hint.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, FIRST_BILLING_ADDRESS),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
    }
    /**
     * Verifies that selecting an incomplete billing address opens the address editor, and that
     * completing the address selects it and promotes it to the top of the dropdown.
     */
    @Test
    @MediumTest
    @Feature({"Payments"})
    public void testSelectIncompleteBillingAddress_EditComplete() throws TimeoutException {
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        // Edit the second card.
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickOnPaymentMethodSuggestionOptionAndWait(
                1, mPaymentRequestTestRule.getReadyToEdit());
        // Now "Select" is selected in the dropdown.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "Select");
        // The incomplete addresses in the dropdown contain edit required messages.
        assertThat(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                           BILLING_ADDRESS_DROPDOWN_INDEX, 5),
                endsWith("Name required"));
        assertThat(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                           BILLING_ADDRESS_DROPDOWN_INDEX, 6),
                endsWith("More information required"));
        assertThat(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                           BILLING_ADDRESS_DROPDOWN_INDEX, 7),
                endsWith("Enter a valid address"));
        // Selects the fourth billing address (the 5th option on the dropdown list) that misses
        // recipient name brings up the address editor.
        mPaymentRequestTestRule.setSpinnerSelectionsInCardEditorAndWait(
                new int[] {DECEMBER, NEXT_YEAR, 5}, mPaymentRequestTestRule.getReadyToEdit());
        mPaymentRequestTestRule.setTextInEditorAndWait(
                new String[] {"<NAME>", "Google", "340 Main St", "Los Angeles", "CA", "90291",
                        "650-253-0000"},
                mPaymentRequestTestRule.getEditorTextUpdate());
        mPaymentRequestTestRule.clickInEditorAndWait(
                R.id.editor_dialog_done_button, mPaymentRequestTestRule.getReadyToEdit());
        // The newly completed address must be selected and put at the top of the dropdown list,
        // right after the select hint.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX, FIRST_BILLING_ADDRESS),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
    }
    @Test
    @MediumTest
    @Feature({"Payments"})
    // Cancelling out of the address editor (opened by selecting an incomplete
    // billing address) must restore the previously selected billing address.
    public void testSelectIncompleteBillingAddress_EditCancel() throws TimeoutException {
        mPaymentRequestTestRule.triggerUIAndWait(mPaymentRequestTestRule.getReadyToPay());
        // Edit the only complete card.
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_section, mPaymentRequestTestRule.getReadyForInput());
        mPaymentRequestTestRule.clickInPaymentMethodAndWait(
                R.id.payments_open_editor_pencil_button, mPaymentRequestTestRule.getReadyToEdit());
        // <NAME> is selected as the billing address.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
        // The incomplete addresses in the dropdown contain edit required messages.
        assertThat(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                           BILLING_ADDRESS_DROPDOWN_INDEX, 5),
                endsWith("Name required"));
        assertThat(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                           BILLING_ADDRESS_DROPDOWN_INDEX, 6),
                endsWith("More information required"));
        assertThat(mPaymentRequestTestRule.getSpinnerTextAtPositionInCardEditor(
                           BILLING_ADDRESS_DROPDOWN_INDEX, 7),
                endsWith("Enter a valid address"));
        // Selects the fourth billing address (the 5th option on the dropdown list) that misses
        // recipient name brings up the address editor.
        mPaymentRequestTestRule.setSpinnerSelectionsInCardEditorAndWait(
                new int[] {DECEMBER, NEXT_YEAR, 5}, mPaymentRequestTestRule.getReadyToEdit());
        mPaymentRequestTestRule.clickInEditorAndWait(
                R.id.payments_edit_cancel_button, mPaymentRequestTestRule.getReadyToEdit());
        // The previous selected address should be selected after canceling out from edit.
        Assert.assertEquals(mPaymentRequestTestRule.getSpinnerSelectionTextInCardEditor(
                                    BILLING_ADDRESS_DROPDOWN_INDEX),
                "<NAME>, 340 Main St, Los Angeles, CA 90291");
    }
}
|
domax/gwt-node | modules/cassandra/src/org/gwtnode/modules/cassandra/System.java | <filename>modules/cassandra/src/org/gwtnode/modules/cassandra/System.java
/*
* Copyright 2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.gwtnode.modules.cassandra;
import org.gwtnode.core.JavaScriptFunction;
import org.gwtnode.core.JsonStringObjectMap;
import org.gwtnode.core.meta.GwtNodeEvent;
import org.gwtnode.core.meta.GwtNodeFunction;
import org.gwtnode.core.meta.GwtNodeObject;
import org.gwtnode.core.node.event.EventEmitter;
import org.gwtnode.core.node.event.ParameterlessEventHandler;
import com.google.gwt.core.client.JavaScriptObject;
@GwtNodeObject
// GWT wrapper around the node-cassandra "System" client object.
// NOTE(review): the class name intentionally mirrors the JS API but shadows
// java.lang.System inside this file — qualify java.lang.System if ever needed.
public class System extends EventEmitter {

    /**
     * Creates a new Cassandra System client connected to the given URN
     * (delegates to the underlying JS module's System constructor).
     */
    @GwtNodeFunction("constructor")
    public static final native System create(String urn) /*-{
        return new (@org.gwtnode.modules.cassandra.Cassandra::get()()).System(urn);
    }-*/;

    // Instances are only created via the JSNI factory above.
    protected System() {}

    /**
     * Adds a keyspace described by {@code ksDef}; {@code callback} is invoked
     * with the Thrift connection result. Either argument may be null.
     */
    @GwtNodeFunction
    public final void addKeyspace(JsonStringObjectMap<?> ksDef, ThriftConnectionEventHandler callback) {
        addKeyspace(ksDef != null ? ksDef.getNativeObject() : null,
                callback != null ? callback.getNativeFunction() : null);
    }

    @GwtNodeFunction
    private final native void addKeyspace(JavaScriptObject ksDef, JavaScriptFunction callback) /*-{
        this.addKeyspace(ksDef, callback);
    }-*/;

    /**
     * Fetches the definition of the named keyspace; {@code callback} receives
     * the Thrift connection result. The callback may be null.
     */
    @GwtNodeFunction
    public final void describeKeyspace(String ksName, ThriftConnectionEventHandler callback) {
        describeKeyspace(ksName, callback != null ? callback.getNativeFunction() : null);
    }

    @GwtNodeFunction
    private final native void describeKeyspace(String ksName, JavaScriptFunction callback) /*-{
        this.describeKeyspace(ksName, callback);
    }-*/;

    /**
     * Closes the connection; {@code callback} (may be null) runs when closed.
     */
    @GwtNodeFunction
    public final void close(ParameterlessEventHandler callback) {
        close(callback != null ? callback.getNativeFunction() : null);
    }

    @GwtNodeFunction
    private final native void close(JavaScriptFunction callback) /*-{
        this.close(callback);
    }-*/;

    /** Subscribes {@code handler} to the underlying "checkq" event. */
    @GwtNodeEvent
    public final void onCheckq(ParameterlessEventHandler handler) {
        on("checkq", handler);
    }
}
|
PacktPublishing/Practical-OneOps | Chapter 09/circuit-oneops-1-master/components/cookbooks/iis/resources/staticcompression.rb | <gh_stars>1-10
# Chef LWRP resource: IIS static-compression settings.
# The provider applies these attributes to the server's static compression
# configuration; :configure is the only (and default) action.
actions :configure
default_action :configure

# Compression level - from 0 (none) to 10 (maximum)
attribute :level, kind_of: Integer, default: 7

# Which mime-types will be / will not be compressed
# (true = compress, false = leave uncompressed; "*/*" is the catch-all)
attribute :mime_types, kind_of: Hash, default: {
  "text/*" => true,
  "message/*" => true,
  "application/x-javascript" => true,
  "application/atom+xml" => true,
  "application/json" => true,
  "application/xml" => true,
  "*/*" => false
}

# The percentage of CPU utilization (0-100) above which compression is disabled
attribute :cpu_usage_to_disable, kind_of: Integer, default: 90

# The percentage of CPU utilization (0-100) below which compression is re-enabled after disable due to excess usage
attribute :cpu_usage_to_reenable, kind_of: Integer, default: 50

# The directory where compressed versions of static files are temporarily stored and cached.
attribute :directory, kind_of: String, default: '%SystemDrive%\inetpub\temp\IIS Temporary Compressed Files'
|
class RemoveNameFromCourseOffering < ActiveRecord::Migration
def change
remove_column :course_offerings, :name, :string
change_column_null :course_offerings, :label, false
end
end
|
AnnotationSro/java-annotation-mapper | jam-processor/src/main/java/sk/annotation/library/jam/processor/utils/annotations/data/AnnotationMapperConfig.java | package sk.annotation.library.jam.processor.utils.annotations.data;
import com.sun.tools.javac.code.Type;
import lombok.Getter;
import lombok.Setter;
import sk.annotation.library.jam.annotations.enums.ApplyFieldStrategy;
import sk.annotation.library.jam.processor.utils.annotations.data.fields.AnnotationFieldIgnore;
import sk.annotation.library.jam.processor.utils.annotations.data.fields.AnnotationFieldMapping;
import java.util.LinkedList;
import java.util.List;
@Getter
@Setter
// Aggregated, processor-internal view of a mapper's annotation configuration:
// the mapped type pair plus all field mappings/ignores, generator configs,
// immutable types, custom-mapped types and apply strategies collected from
// the source annotations. Collection fields are final and mutated in place.
public class AnnotationMapperConfig {
    // Source/destination type pair this configuration applies to.
    public TypeConfig type;
    final private List<AnnotationFieldMapping> fieldMapping = new LinkedList<>();
    final private List<AnnotationFieldIgnore> fieldIgnore = new LinkedList<>();
    final private List<AnnotationConfigGenerator> config = new LinkedList<>();
    // Types treated as immutable (copied by reference, never mapped field-by-field).
    final private List<Type> immutable = new LinkedList<>();
    // Types for which a custom mapping method is used.
    final private List<Type> withCustom = new LinkedList<>();
    final private List<ApplyFieldStrategy> applyWhen = new LinkedList<>();
}
|
Ezeer/VegaStrike_win32FR | vegastrike/src/audio/renderers/OpenAL/OpenALRenderableSource.cpp | //
// C++ Implementation: Audio::OpenALRenderableListener
//
#include "OpenALRenderableSource.h"
#include "OpenALSimpleSound.h"
#include "OpenALHelpers.h"
#include "config.h"
#include "al.h"
#include "../../Source.h"
#include "../../Listener.h"
#include "vs_math.h"
namespace Audio {
    // Convenience overloads that forward the components of the engine's vector
    // types to the C API ::alSource3f (note the global-scope qualifier, which
    // avoids recursing into these overloads).
    static inline void alSource3f(ALuint source, ALenum param, const Vector3 &v)
    {
        ::alSource3f(source, param, ALfloat(v.x), ALfloat(v.y), ALfloat(v.z));
    }

    static inline void alSource3f(ALuint source, ALenum param, const LVector3 &v)
    {
        ::alSource3f(source, param, ALfloat(v.x), ALfloat(v.y), ALfloat(v.z));
    }
    // Creates the renderable and allocates one OpenAL source handle for it.
    // Buffers are attached lazily on first play (see attachALBuffers).
    // NOTE(review): alGenSources failure is not checked here — confirm callers
    // tolerate a 0/invalid handle.
    OpenALRenderableSource::OpenALRenderableSource(Source *source)
        : RenderableSource(source),
        alSource(0),
        alBuffersAttached(false)
    {
        alGenSources(1,&alSource);
    }
    // Releases the OpenAL source handle allocated in the constructor.
    OpenALRenderableSource::~OpenALRenderableSource()
    {
        alDeleteSources(1,&alSource);
    }
    // Starts playback if not already playing: lazily attaches the sound's AL
    // buffer, issues alSourcePlay, then seeks to `start` when it is non-zero.
    // Throws via checkAlError() on AL failures.
    void OpenALRenderableSource::startPlayingImpl(Timestamp start)
        throw(Exception)
    {
        if (!isPlayingImpl()) {
            // Make sure we have an attached sound
            attachALBuffers();

            // Tell the AL to start playing (from the specified position)
            clearAlError();
            ALuint als = getALSource();
            alSourcePlay(als);
            checkAlError();
            if (start != 0)
                seekImpl(start);
        }
    }
    // Stops playback immediately; AL errors are not checked here.
    void OpenALRenderableSource::stopPlayingImpl()
        throw(Exception)
    {
        alSourceStop(alSource);
    }
    // Returns true iff the AL reports this source in the AL_PLAYING state.
    bool OpenALRenderableSource::isPlayingImpl() const
        throw(Exception)
    {
        ALint state = 0;
        alGetSourcei(getALSource(), AL_SOURCE_STATE, &state);
        return (state == AL_PLAYING);
    }
    // Returns the current playback position in seconds (AL_SEC_OFFSET).
    // If the query leaves `offs` negative (attribute unsupported by this AL
    // implementation), reports NotImplementedException instead.
    Timestamp OpenALRenderableSource::getPlayingTimeImpl() const
        throw(Exception)
    {
        ALfloat offs = -1.f;
        alGetSourcef(getALSource(), AL_SEC_OFFSET, &offs);
        if (offs < 0.f)
            throw NotImplementedException("getPlayingTimeImpl");
        return Timestamp(offs);
    }
    // Pushes the Source's current state to the AL source, selectively per the
    // UPDATE_* bits in `flags`. Non-relative sources are translated into the
    // listener's local frame so AL can treat the listener as the origin.
    void OpenALRenderableSource::updateImpl(int flags, const Listener& sceneListener)
        throw(Exception)
    {
        Source *source = getSource();
        ALSourceHandle als = getALSource();

        clearAlError();

        if (flags & UPDATE_ATTRIBUTES) {
            // Distance attenuation
            if (source->isAttenuated()) {
                alSourcef(als, AL_REFERENCE_DISTANCE, source->getRadius());
                alSourcef(als, AL_ROLLOFF_FACTOR, 1.f);
            } else {
                // Rolloff factor 0 disables distance attenuation entirely.
                alSourcef(als, AL_ROLLOFF_FACTOR, 0.f);
            }
            // Cone
            {
                Range<Scalar> angleRange = source->getAngleRange();
                // radians -> degrees via (1/pi)*360; assumes angleRange holds
                // half-angles in radians so the cone spans twice the value —
                // TODO(review): confirm against Source::getAngleRange().
                alSourcef(als, AL_CONE_INNER_ANGLE, float(angleRange.min) * M_1_PI * 360.f);
                alSourcef(als, AL_CONE_OUTER_ANGLE, float(angleRange.max) * M_1_PI * 360.f);
                alSourcef(als, AL_CONE_OUTER_GAIN , 0.f);
            }
            // Relativity
            alSourcei(als, AL_SOURCE_RELATIVE, source->isRelative() ? AL_TRUE : AL_FALSE);
            // Looping
            alSourcei(als, AL_LOOPING, source->isLooping() ? AL_TRUE : AL_FALSE);
        }
        if (flags & UPDATE_GAIN) {
            // Gain
            alSourcef(als, AL_GAIN, source->getGain());
        }
        if (flags & UPDATE_LOCATION) {
            if (source->isRelative()) {
                // Listener-relative source: pass coordinates through unchanged.
                alSource3f(als, AL_POSITION, source->getPosition());
                alSource3f(als, AL_VELOCITY, source->getVelocity());
                alSource3f(als, AL_DIRECTION, source->getDirection());
            } else {
                // World-space source: re-express position/velocity/direction in
                // the listener's local frame before handing them to AL.
                alSource3f(als, AL_POSITION,
                    source->getPosition() - sceneListener.getPosition() );
                alSource3f(als, AL_VELOCITY,
                    sceneListener.toLocalDirection(
                        source->getVelocity() - sceneListener.getVelocity()
                    ) );
                alSource3f(als, AL_DIRECTION,
                    sceneListener.toLocalDirection(
                        source->getDirection()
                    ) );
            }
        }

        checkAlError();
    }
void OpenALRenderableSource::attachALBuffers()
throw(Exception)
{
if (!alBuffersAttached) {
SharedPtr<Sound> sound = getSource()->getSound();
if (!sound->isLoaded())
sound->load();
assert(!sound->isStreaming() && "OpenALRenderableSource can only handle streaming sounds");
// Attachment to a simple sound, just assign the AL buffer to this AL source
ALBufferHandle alBuffer = dynamic_cast<OpenALSimpleSound*>(sound.get())->getAlBuffer();
ALSourceHandle alSource = getALSource();
alSourcei(alSource, AL_BUFFER, alBuffer);
alBuffersAttached = true;
checkAlError();
}
}
void OpenALRenderableSource::seekImpl(Timestamp time)
throw(Exception)
{
// Tell the AL to jump to the specified position
// NOTE: lots of implementations don't support it
// but according to OpenAL 1.1 specs they should
clearAlError();
ALuint als = getALSource();
alSourcef(als, AL_SEC_OFFSET, time);
ALenum error = alGetError();
if (error == ALC_INVALID_ENUM) {
// This version of the AL does not support seeking
// fail silently
// TODO: must log the fact to console as a warning
} else {
checkAlErrorCode(error);
}
}
};
|
const quaseArray = { 0: 'rafael', 1:'ana', 2: 'bia' }
// Demo: an "array-like" plain object (numeric keys) vs. a real array.
console.log(quaseArray)

// Attach a non-enumerable toString so it doesn't show up in key iteration.
// NOTE(review): this toString returns the object itself (Object(this)),
// not a string — presumably intentional for the demo output.
Object.defineProperty(quaseArray, 'toString', {
    value: function() { return Object(this)},
    enumerable: false
})

console.log(quaseArray[0])

const meuArray = ['rafael', 'ana', 'bia']
// Compare the custom toString() result with a genuine array.
console.log(quaseArray.toString(), meuArray)
dipsuji/Phython-Learning | practiceset/longest_polidrome.py | <filename>practiceset/longest_polidrome.py
def polindrom(str1):
    """Return True iff *str1* reads the same forwards and backwards.

    Empty and single-character strings are palindromes by definition.
    (Function name keeps the module's original spelling for compatibility.)
    """
    left, right = 0, len(str1) - 1
    # Walk a pointer in from each end; any mismatch disproves the palindrome.
    while left < right:
        if str1[left] != str1[right]:
            return False
        left += 1
        right -= 1
    return True
print(polindrom("abcaba"))
def longestPalSubstr(string):
    """Expand-around-center search for the longest palindromic substring.

    Prints a header line and the substring itself (matching the original
    output), then returns the substring's length. The minimum reported
    length is 1, as in the original implementation.
    """
    n = len(string)
    best_start = 0
    best_len = 1

    def widen(lo, hi):
        # Grow [lo, hi] outwards while it remains a palindrome; return the
        # (start, length) of the widest palindrome found.
        while lo >= 0 and hi < n and string[lo] == string[hi]:
            lo -= 1
            hi += 1
        return lo + 1, hi - lo - 1

    for centre in range(1, n):
        # Even-length palindromes centred between centre-1 and centre,
        # then odd-length palindromes centred on centre.
        for lo, hi in ((centre - 1, centre), (centre - 1, centre + 1)):
            start, length = widen(lo, hi)
            if length > best_len:
                best_start, best_len = start, length

    print("Longest palindrome substring is:")
    print(string[best_start:best_start + best_len])
    return best_len
# Driver program to test above functions
# Prints the longest palindromic substring of the sample word and its length.
string = "hisingisignishi"
print("Length is: " + str(longestPalSubstr(string)))
|
nailed/nailed-api | src/main/java/jk_5/nailed/api/plugin/Plugin.java | package jk_5.nailed.api.plugin;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * An annotation used to describe and mark a Nailed plugin.
 * (NOTE(review): original javadoc said "Sponge plugin" — this is the Nailed
 * API's plugin marker, presumably adapted from Sponge's.)
 *
 * @author jk-5
 */
@Target(TYPE)
@Retention(RUNTIME)
public @interface Plugin {

    /**
     * An ID for the plugin to be used internally. The ID should be unique as to
     * not conflict with other plugins.
     *
     * @return The id of the plugin
     */
    String id();

    /**
     * The human readable name of the plugin as to be used in descriptions and
     * similar things.
     *
     * @return The human readable name of the plugin
     */
    String name();

    /**
     * The version of the plugin.
     *
     * @return The version of the plugin
     */
    String version() default "unknown";
}
|
MirekSz/webpack-es6-ts | app/mods/mod1806.js | <reponame>MirekSz/webpack-es6-ts
// Module in a synthetic dependency chain: exports its predecessor's value + 1.
import mod1805 from './mod1805';

const value = mod1805 + 1;
export default value;
|
robinshi007/cx-tasks | frontend/craco.config.js | const path = require('path');
const fs = require('fs');
const CircularDependencyPlugin = require('circular-dependency-plugin');
const WebpackBar = require('webpackbar');
// Resolve paths relative to the real project root (follows symlinks).
const appDirectory = fs.realpathSync(process.cwd());
const resolveApp = (relativePath) => path.resolve(appDirectory, relativePath);

// CRACO configuration: wires Tailwind/autoprefixer into CRA's PostCSS chain
// and extends the webpack config with a progress bar, a circular-dependency
// guard over src/, and '@' path aliases.
module.exports = {
  style: {
    postcss: {
      plugins: [require('tailwindcss')(resolveApp('tailwind.config.js')), require('autoprefixer')],
    },
  },
  webpack: {
    plugins: [
      // Build progress bar in the terminal.
      new WebpackBar(),
      // Fail the build on any circular import inside src/.
      new CircularDependencyPlugin({
        exclude: /node_modules/,
        include: /src/,
        failOnError: true,
        allowAsyncCycles: false,
        cwd: process.cwd(),
      }),
    ],
    alias: {
      '@': path.resolve(__dirname, 'src'),
      '@images': path.resolve(__dirname, 'src/assets/images/'),
    },
  },
};
|
wahello/openshift-installer | terraform/azurerm/vendor/github.com/hashicorp/terraform-provider-azurerm/internal/services/compute/validate/linux_computer_name.go | package validate
import (
"fmt"
"strings"
)
// LinuxComputerNameFull validates a complete Linux computer (host) name;
// trailing dashes are rejected.
func LinuxComputerNameFull(i interface{}, k string) (warnings []string, errors []error) {
	// Linux host name cannot exceed 64 characters in length
	return LinuxComputerName(i, k, 64, false)
}
// LinuxComputerNamePrefix validates a Linux computer-name prefix; a trailing
// dash is allowed here because a suffix will be appended later.
func LinuxComputerNamePrefix(i interface{}, k string) (warnings []string, errors []error) {
	// Linux host name prefix cannot exceed 58 characters in length
	return LinuxComputerName(i, k, 58, true)
}
// LinuxComputerName validates that i is a string acceptable as a Linux
// computer (host) name for field k: non-empty, at most maxLength characters,
// not starting with an underscore, not ending with a period, free of the
// listed special characters, and — unless allowDashSuffix is set — not
// ending with a dash. All violations are accumulated into errors.
func LinuxComputerName(i interface{}, k string, maxLength int, allowDashSuffix bool) (warnings []string, errors []error) {
	v, ok := i.(string)
	if !ok {
		errors = append(errors, fmt.Errorf("expected %q to be a string but it wasn't!", k))
		return
	}

	// The value must not be empty.
	if strings.TrimSpace(v) == "" {
		errors = append(errors, fmt.Errorf("%q must not be empty", k))
		return
	}

	if len(v) > maxLength {
		errors = append(errors, fmt.Errorf("%q can be at most %d characters, got %d", k, maxLength, len(v)))
	}

	if strings.HasPrefix(v, "_") {
		errors = append(errors, fmt.Errorf("%q cannot begin with an underscore", k))
	}
	if strings.HasSuffix(v, ".") {
		errors = append(errors, fmt.Errorf("%q cannot end with a period", k))
	}
	if !allowDashSuffix && strings.HasSuffix(v, "-") {
		errors = append(errors, fmt.Errorf("%q cannot end with a dash", k))
	}

	// Linux host name cannot contain the following characters
	specialCharacters := `\/"[]:|<>+=;,?*@&~!#$%^()_{}'`
	if strings.ContainsAny(v, specialCharacters) {
		errors = append(errors, fmt.Errorf("%q cannot contain the special characters: `%s`", k, specialCharacters))
	}

	return warnings, errors
}
|
mobarski/sandbox | topic/lda/test_lda2.py | <reponame>mobarski/sandbox
from __future__ import print_function
from time import time
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.decomposition import NMF, LatentDirichletAllocation
# Parameters for the NMF / LDA topic-extraction demo below.
n_samples = 9       # NOTE(review): only used in log messages; data_samples actually holds 14 entries
n_features = 100    # vocabulary size cap for both vectorizers
n_components = 3    # number of topics to extract
n_top_words = 6     # words printed per topic
def print_top_words(model, feature_names, n_top_words):
    """Print the n_top_words highest-weighted terms of each topic in *model*.

    One "Topic #i: w1 w2 ..." line per row of model.components_, followed by
    a single blank line.
    """
    for index, weights in enumerate(model.components_):
        # Indices of the largest weights, in descending order.
        top_indices = weights.argsort()[:-n_top_words - 1:-1]
        terms = " ".join(feature_names[i] for i in top_indices)
        print("Topic #%d: %s" % (index, terms))
    print()
print("Loading dataset...")
t0 = time()
## dataset = fetch_20newsgroups(shuffle=True, random_state=1,
## remove=('headers', 'footers', 'quotes'))
## data_samples = dataset.data[:n_samples]
data_samples = [
"reksio szczeka na koty",
"pies glosno szczeka",
"koty cicho mrucza",
"reksio to madry pies",
"kerbale buduja rakiety",
"rakiety wynosza satelity",
"satelity sa na orbicie",
"rakiety glosno startuja",
"szybowce leca cicho",
"szybowce startuja z wyciagarki",
"samoloty szturmowe leca nisko",
"krowy jedza trawe",
"kury jedza ziarno",
"krowy pija wode"
]
print("done in %0.3fs." % (time() - t0))
# Use tf-idf features for NMF.
print("Extracting tf-idf features for NMF...")
tfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=1,
max_features=n_features,
stop_words=['na','sa','z','to'])
t0 = time()
tfidf = tfidf_vectorizer.fit_transform(data_samples)
print("done in %0.3fs." % (time() - t0))
#print(tfidf)
# Use tf (raw term count) features for LDA.
print("Extracting tf features for LDA...")
tf_vectorizer = CountVectorizer(max_df=0.95, min_df=1,
max_features=n_features,
stop_words=['na','sa','z'])
t0 = time()
tf = tf_vectorizer.fit_transform(data_samples)
tf_feature_names = tf_vectorizer.get_feature_names()
print("done in %0.3fs." % (time() - t0))
print(tf_feature_names)
# Fit the NMF model
if 1:
print("Fitting the NMF model (Frobenius norm) with tf-idf features, "
"n_samples=%d and n_features=%d..."
% (n_samples, n_features))
t0 = time()
nmf = NMF(n_components=n_components, random_state=1,
alpha=.1, l1_ratio=.5).fit(tfidf)
print("done in %0.3fs." % (time() - t0))
print("\nTopics in NMF model (Frobenius norm):")
tfidf_feature_names = tfidf_vectorizer.get_feature_names()
print_top_words(nmf, tfidf_feature_names, n_top_words)
# Fit the LDA model
if 1:
print("Fitting LDA models with tf features, "
"n_samples=%d and n_features=%d..."
% (n_samples, n_features))
lda = LatentDirichletAllocation(n_components=n_components, max_iter=50,
learning_method='online',
learning_offset=50.,
random_state=0)
t0 = time()
lda.fit(tf)
print("done in %0.3fs." % (time() - t0))
print("\nTopics in LDA model:")
tf_feature_names = tf_vectorizer.get_feature_names()
print_top_words(lda, tf_feature_names, n_top_words)
|
PinkFlufflyLlama/ttauri | src/ttauri/file_view_tests.cpp | // Copyright <NAME> 2019.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at https://www.boost.org/LICENSE_1_0.txt)
#include "ttauri/file_view.hpp"
#include "ttauri/required.hpp"
#include <gtest/gtest.h>
#include <iostream>
#include <string>
using namespace std;
using namespace tt;
// Maps the fixture file via file_view and checks that the mapped bytes start
// with the expected prefix (the fixture must sit in the test's working dir).
TEST(file_view, read) {
    ttlet view = file_view(URL("file:file_view.txt"));
    ttlet *test = reinterpret_cast<char const *>(view.bytes().data());
    ASSERT_TRUE(strncmp(test, "The quick brown", 15) == 0);
}
|
daemon-demon/airflow | airflow/providers/redis/operators/redis_publish.py | <filename>airflow/providers/redis/operators/redis_publish.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Dict
from airflow.models import BaseOperator
from airflow.providers.redis.hooks.redis import RedisHook
from airflow.utils.decorators import apply_defaults
class RedisPublishOperator(BaseOperator):
    """
    Publish a message to Redis.

    :param channel: redis channel to which the message is published (templated)
    :type channel: str
    :param message: the message to publish (templated)
    :type message: str
    :param redis_conn_id: redis connection to use
    :type redis_conn_id: str
    """

    # Both fields support Jinja templating.
    template_fields = ('channel', 'message')

    @apply_defaults
    def __init__(self, *, channel: str, message: str, redis_conn_id: str = 'redis_default', **kwargs) -> None:
        super().__init__(**kwargs)
        self.redis_conn_id = redis_conn_id
        self.channel = channel
        self.message = message

    def execute(self, context: Dict) -> None:
        """
        Publish the message to Redis channel

        :param context: the context object
        :type context: dict
        """
        redis_hook = RedisHook(redis_conn_id=self.redis_conn_id)
        # Fixed typo in the log text ("messsage" -> "message").
        self.log.info('Sending message %s to Redis on channel %s', self.message, self.channel)
        # publish() returns the number of clients that received the message.
        result = redis_hook.get_conn().publish(channel=self.channel, message=self.message)
        self.log.info('Result of publishing %s', result)
|
kylearon/test1 | com.soartech.simjr.core/src/main/java/com/soartech/simjr/ui/shapes/NullShapeFactory.java | /*
* Copyright (c) 2010, Soar Technology, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of Soar Technology, Inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without the specific prior written permission of Soar Technology, Inc.
*
* THIS SOFTWARE IS PROVIDED BY SOAR TECHNOLOGY, INC. AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL SOAR TECHNOLOGY, INC. OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Created on Sep 26, 2007
*/
package com.soartech.simjr.ui.shapes;
import com.soartech.shapesystem.Shape;
import com.soartech.shapesystem.ShapeSystem;
import com.soartech.shapesystem.swing.SwingPrimitiveRendererFactory;
import com.soartech.simjr.sim.Entity;
/**
 * Fallback entity-shape factory used when no specific shape is configured:
 * delegates everything to an ImageEntityShape showing the generic
 * "unknown entity" icon. Displayed to users as "none".
 *
 * @author ray
 */
public class NullShapeFactory implements EntityShapeFactory
{
    /** Shared singleton instance of this factory. */
    public static final EntityShapeFactory FACTORY = new NullShapeFactory();

    // All work is delegated to this image-backed factory.
    private final EntityShapeFactory internal;

    public NullShapeFactory()
    {
        this.internal = ImageEntityShape.create("images/shapes/unknown-entity.png");
    }

    /* (non-Javadoc)
     * @see com.soartech.simjr.ui.shapes.EntityShapeFactory#initialize(com.soartech.shapesystem.swing.SwingPrimitiveRendererFactory)
     */
    public void initialize(SwingPrimitiveRendererFactory rendererFactory)
    {
        this.internal.initialize(rendererFactory);
    }

    /* (non-Javadoc)
     * @see com.soartech.simjr.ui.shapes.EntityShapeFactory#create(com.soartech.simjr.Entity, com.soartech.shapesystem.ShapeSystem)
     */
    public EntityShape create(Entity entity, ShapeSystem system)
    {
        return internal.create(entity, system);
    }

    /* (non-Javadoc)
     * @see com.soartech.simjr.ui.shapes.EntityShapeFactory#createSelection(java.lang.String, com.soartech.simjr.Entity)
     */
    public Shape createSelection(String id, Entity selected)
    {
        return internal.createSelection(id, selected);
    }

    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString()
    {
        return "none";
    }

    @Override
    public SwingPrimitiveRendererFactory getRendererFactory()
    {
        return this.internal.getRendererFactory();
    }
}
|
lcy0x1/Create | src/main/java/com/simibubi/create/foundation/ponder/instruction/EmitParticlesInstruction.java | package com.simibubi.create.foundation.ponder.instruction;
import com.simibubi.create.Create;
import com.simibubi.create.foundation.ponder.PonderScene;
import com.simibubi.create.foundation.ponder.PonderWorld;
import net.minecraft.client.Minecraft;
import net.minecraft.client.particle.ParticleEngine;
import net.minecraft.core.particles.ParticleOptions;
import net.minecraft.world.phys.Vec3;
// Ponder instruction that spawns particles at a fixed anchor every tick for
// the instruction's duration. Fractional runsPerTick values are honoured
// stochastically (e.g. 0.25 emits on ~1 in 4 ticks).
public class EmitParticlesInstruction extends TickingInstruction {

    private Vec3 anchor;
    private Emitter emitter;
    private float runsPerTick;

    /** Strategy that creates one particle at (x, y, z) in the Ponder world. */
    @FunctionalInterface
    public static interface Emitter {

        /** Emitter spawning `data` particles exactly at the given point with fixed motion. */
        public static <T extends ParticleOptions> Emitter simple(T data, Vec3 motion) {
            return (w, x, y, z) -> w.addParticle(data, x, y, z, motion.x, motion.y, motion.z);
        }

        /** Emitter spawning particles at a random offset within the block containing the point. */
        public static <T extends ParticleOptions> Emitter withinBlockSpace(T data, Vec3 motion) {
            return (w, x, y, z) -> w.addParticle(data, Math.floor(x) + Create.RANDOM.nextFloat(),
                Math.floor(y) + Create.RANDOM.nextFloat(), Math.floor(z) + Create.RANDOM.nextFloat(), motion.x,
                motion.y, motion.z);
        }

        // NOTE(review): method name has a typo ("paticle") but is part of the
        // public interface; renaming would break external callers.
        static ParticleEngine paticleManager() {
            return Minecraft.getInstance().particleEngine;
        }

        public void create(PonderWorld world, double x, double y, double z);
    }

    /**
     * @param anchor      world position where particles appear
     * @param emitter     particle-creation strategy
     * @param runsPerTick average number of emissions per tick (may be fractional)
     * @param ticks       how many ticks the instruction stays active
     */
    public EmitParticlesInstruction(Vec3 anchor, Emitter emitter, float runsPerTick, int ticks) {
        super(false, ticks);
        this.anchor = anchor;
        this.emitter = emitter;
        this.runsPerTick = runsPerTick;
    }

    @Override
    public void tick(PonderScene scene) {
        super.tick(scene);
        // Emit floor(runsPerTick) times, plus one extra emission with
        // probability equal to the fractional remainder.
        int runs = (int) runsPerTick;
        if (Create.RANDOM.nextFloat() < (runsPerTick - runs))
            runs++;
        for (int i = 0; i < runs; i++)
            emitter.create(scene.getWorld(), anchor.x, anchor.y, anchor.z);
    }
}
|
leroynas/udacity-wyr | app/components/pages/Question/index.js | <reponame>leroynas/udacity-wyr
/**
*
* Question
*
*/
import React, { memo, useState } from 'react';
import PropTypes from 'prop-types';
import { Redirect } from 'react-router-dom';
import Container from 'components/ui/Container';
import Title from 'components/ui/Title';
import Page from 'components/ui/Page';
import Avatar from 'components/ui/Avatar';
import Flex from 'components/ui/Flex';
import Heading from 'components/ui/Heading';
import FormGroup from 'components/ui/FormGroup';
import RadioInput from 'components/ui/RadioInput';
import Button from 'components/ui/Button';
// Poll page: shows a "Would you rather" question with two options.
// If the current user already answered, redirects to the result page;
// otherwise lets them pick an option and submit a vote via saveAnswer.
function Question({ question, currentUser, saveAnswer }) {
  const [answer, setAnswer] = useState('');

  // No-op until an option is chosen (answer stays '' -> short-circuits).
  const handleSaveAnswer = () =>
    answer !== '' &&
    saveAnswer({
      qid: question.id,
      uid: currentUser.id,
      answer,
    });

  // Already-answered questions go straight to their results.
  if ({}.hasOwnProperty.call(currentUser.answers, question.id)) {
    return <Redirect to={`/question/${question.id}/result`} />;
  }

  const options = [
    { title: question.optionOne.text, value: 'optionOne' },
    { title: question.optionTwo.text, value: 'optionTwo' },
  ];

  return (
    <Container>
      <Title>{`${question.authorName} asks`}</Title>
      <Page>
        <Flex alignItems="column" spacing="lg">
          <Avatar src={question.authorAvatarURL} />
          <Flex direction="column">
            <Heading size="xl" spacing="md">
              Would you rather...
            </Heading>
            <FormGroup>
              <RadioInput
                options={options}
                value={answer}
                onChange={setAnswer}
              />
            </FormGroup>
          </Flex>
        </Flex>
        <Button onClick={handleSaveAnswer}>Submit Vote</Button>
      </Page>
    </Container>
  );
}

Question.propTypes = {
  question: PropTypes.object.isRequired,
  currentUser: PropTypes.object.isRequired,
  saveAnswer: PropTypes.func.isRequired,
};

export default memo(Question);
|
BernardoFuret/async-tajs | test-resources/src/flowgraphbuilder/flowgraph_builder0103.js | <gh_stars>1-10
// Flow-graph builder fixture: a for-in loop whose body starts with
// `continue`, leaving the assignment below deliberately unreachable —
// this exercises dead-code handling in the analyzer, not a bug.
var x = {a:42}
for (var b in x) {
    continue;
    b = 44;
}
|
Fusion-Rom/android_external_chromium_org | athena/home/public/app_model_builder.h | <gh_stars>1-10
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ATHENA_HOME_PUBLIC_APP_MODEL_BUILDER_H_
#define ATHENA_HOME_PUBLIC_APP_MODEL_BUILDER_H_
#include "athena/athena_export.h"
namespace app_list {
class AppListModel;
} // namespace app_list
namespace athena {
// An interface to fill the list of apps in the home card.
// TODO(mukai): integrate the interface with chrome/browser/ui/app_list/
// extension_app_model_builder.
class ATHENA_EXPORT AppModelBuilder {
 public:
  virtual ~AppModelBuilder() {}

  // Fills |model| with the currently available app_list::AppListItems.
  virtual void PopulateApps(app_list::AppListModel* model) = 0;
};
} // namespace athena
#endif // ATHENA_HOME_PUBLIC_APP_MODEL_BUILDER_H_
|
cuauv/software | locator/density.py | <reponame>cuauv/software<filename>locator/density.py<gh_stars>10-100
import numpy
import pylab
import math
from math import degrees, atan2, radians, cos, sin
import random
import scipy.ndimage
num_type = numpy.float32
def normalize_volume(array):
    ''' Scales array IN PLACE so its total density (sum) is 1.
    (Original docstring claimed a new array was returned; the function
    actually mutates its argument and returns None.) '''
    array /= numpy.sum(array)
def normalize_max(array):
    '''Scale array in place so its largest absolute entry becomes 1.'''
    array /= numpy.abs(array).max()
def Gauss(x, mu, sigma):
    '''Value of the normal pdf N(mu, sigma^2) evaluated at x.'''
    # Coerce up front so integer inputs behave like floats.
    x, mu, sigma = float(x), float(mu), float(sigma)
    return math.exp(-(x-mu)**2/(2*sigma**2))/math.sqrt(2*math.pi*sigma**2)
def MakeGauss(width,height, x,y, x_sigma, y_sigma):
    ''' Makes a (nearly) Gaussian distribution around point x,y of given
    sigmas and of total size (width,height).

    The separable Gaussian is built as an outer product of 1-D profiles,
    clipped to the array bounds; cells outside the written window keep the
    corner value gxs[0]*gys[0] as a floor. NOTE(review): the result is not
    renormalized here — callers presumably normalize separately. '''
    # Window-size optimization left disabled (full-size window is used):
    #w = min(6*x_sigma,width)
    #h = min(6*y_sigma,height)
    w= width
    h = height
    # 1-D Gaussian profiles centered so that index x (resp. y) gets the peak;
    # list comprehensions drop samples that would fall outside the array.
    gxs = numpy.array([Gauss(i-w/2,0,x_sigma) for i in range(w) if i+x-w/2 >= 0 and i+x-w/2 < width], dtype = num_type)
    gys = numpy.array([Gauss(j-h/2,0,y_sigma) for j in range(h) if j+y-h/2 >= 0 and j+y-h/2 < height], dtype = num_type)
    mat = numpy.outer( gxs, gys )
    # Destination window [x-w/2, x+w/2) x [y-h/2, y+h/2), clamped to bounds.
    bounds = [min(width,max(0,x-w/2)), min(width, max(0,x+w/2)), min(height, max(0,y-h/2)), min(height, max(0,y+h/2))]
    # Background floor uses the first (edge) sample of each profile.
    d = numpy.ones((width,height), dtype = num_type)*gxs[0]*gys[0]
    d[bounds[0]:bounds[1], bounds[2]:bounds[3]] = mat
    return d
def MakeUniform(w,h):
    ''' Makes a uniform probability density function of a given size. '''
    grid = numpy.ones((w, h), dtype=num_type)
    return grid / (w * h)
def Infer(H, EH):
    '''
    Return a new density which gives the probability of the hypothesis
    at each point given a piece of evidence E which has probability
    P(E|H) = EH at each point.

    Applies Bayesian inference pointwise: H is the prior probability of a
    hypothesis (e.g. the object is at (x,y)); EH is P(E | H at (x,y)).
    '''
    posterior = H * EH
    posterior /= numpy.sum(posterior)
    return posterior
class Evidence:
    """A spatial likelihood template for one kind of sensor observation.

    ``E`` is a 2-D array giving the relative likelihood of the observation at
    each offset from the observation point; ``center`` shifts the template's
    anchor; optional ``blur`` smears the template to model spatial uncertainty.
    """
    def __init__(self, array, prob_false_positive, prob_false_negative, blur=0, center = (0,0)):
        #self.E = array*(1-prob_false_negative) + (1-array)*prob_false_positive
        self.E = array
        self.prob_false_positive = prob_false_positive
        self.prob_false_negative = prob_false_negative
        self.center = center
        # Gaussian blur models spatial uncertainty in where the template applies.
        if blur != 0:
            self.E = scipy.ndimage.filters.gaussian_filter(self.E, blur, 0, mode="constant")
    def InferFor(self, H, x,y, angle):
        ''' Updates H (in place) given the evidence occuring at x,y '''
        # Rotate the template to the observation heading; scale it by the
        # probability that a detection is genuine.
        E = scipy.ndimage.interpolation.rotate(self.E, -angle)
        E *= (1-self.prob_false_negative-self.prob_false_positive)
        w,h = E.shape
        # Rotate the anchor offset by the same heading.
        cx = self.center[0]*cos(radians(angle)) + self.center[1]*sin(radians(angle))
        cy = -self.center[0]*sin(radians(angle)) + self.center[1]*cos(radians(angle))
        ox = E.shape[0]/2+cx #offset x
        oy = E.shape[1]/2+cy #offset y
        # NOTE(review): ox/oy are floats under Python 3 ("/" is true division),
        # which makes the slices below raise TypeError — this module appears to
        # target Python 2; confirm before porting.
        # Bayesian Inference
        block = H[x-ox:x+w-ox,y-oy:y+h-oy].copy()
        H *= self.prob_false_positive
        H[x-ox:x+w-ox, y-oy:y+h-oy] += E*block
        H /= numpy.sum(H)
        #pylab.plot( [y-oy, y+h-oy], [x-ox, x+w-ox] , "o")
    def InferAgainst(self, H, x,y, angle):
        ''' Updates H (in place) given the evidence not occuring at x,y '''
        E = scipy.ndimage.interpolation.rotate(self.E, -angle)
        # Negative-evidence weight: same rotation, complementary scaling.
        E *= (self.prob_false_negative-(1-self.prob_false_positive))
        w,h = E.shape
        cx = self.center[0]*cos(radians(angle)) + self.center[1]*sin(radians(angle))
        cy = -self.center[0]*sin(radians(angle)) + self.center[1]*cos(radians(angle))
        ox = E.shape[0]/2+cx #offset x
        oy = E.shape[1]/2+cy #offset y
        # Bayesian Inference
        block = H[x-ox:x+w-ox,y-oy:y+h-oy].copy()
        H *= (1-self.prob_false_positive)
        H[x-ox:x+w-ox, y-oy:y+h-oy] += E*block
        H /= numpy.sum(H)
# For timeit testing
# Setup string and statement for `timeit`, e.g.:
#   python -m timeit -s "$(cat setup)" "E.InferFor(I, 500,500, 0)"
test_prep = """
import density
import numpy
num_type = numpy.float32
E = density.Evidence(numpy.ones((100,100), dtype = num_type),0.2,0.2,blur=1)
I = numpy.ones( (1000,1000), dtype = num_type )
"""
test = "E.InferFor(I, 500,500, 0)"
#Actual script
# Interactive demo: shows a prior density as a heat map; left-click applies
# positive evidence at the clicked point, right-click applies negative
# evidence, and the display is redrawn after each update.
if __name__ == "__main__":
    w,h = 500,500 # Should be even
    # Prior: a broad Gaussian centred in the grid.
    H = MakeGauss(w,h, 250,250, 50,50)
    I = H
    import matplotlib
    import matplotlib.colors
    # Log colour scale so low-probability regions stay visible.
    norm = matplotlib.colors.LogNorm(1e-7,1)
    imgplot = pylab.imshow(I, cmap=matplotlib.cm.hot, picker=True, norm=norm)
    imgplot.set_interpolation("nearest")
    ticks =[1,1e-1,1e-2,1e-3,1e-4,1e-5,1e-6]
    colorbar = pylab.colorbar(ticks=ticks)
    colorbar.set_ticklabels([str(x) for x in ticks])
    # Evidence template: a hollow square annulus.
    shape = numpy.zeros( (100,100), dtype = num_type )
    shape[10:-10,10:-10] = numpy.ones( (80,80), dtype = num_type )
    shape[40:60,40:60] = numpy.zeros( (20,20), dtype = num_type)
    #shape = numpy.ones( (5,100), dtype = num_type )
    i = 0
    E = Evidence(shape, 0.2,0.2, blur=5, center=(0,-50))
    def on_pick(event):
        # Matplotlib pick handler: note the x/y swap — image rows are the
        # first array axis.
        mouseevent = event.mouseevent
        my,mx = mouseevent.xdata, mouseevent.ydata
        global I
        if mouseevent.button == 1:
            E.InferFor(I, mx, my, 45)
        elif mouseevent.button == 3:
            E.InferAgainst(I, mx, my, 45)
        imgplot.set_data(I)
        pylab.draw()
    pylab.gcf().canvas.mpl_connect("pick_event", on_pick)
    pylab.show()
|
TrustedBSD/sebsd | tools/regression/mqueue/mqtest1/mqtest1.c | <reponame>TrustedBSD/sebsd
/* $FreeBSD: src/tools/regression/mqueue/mqtest1/mqtest1.c,v 1.1 2005/11/26 13:19:08 davidxu Exp $ */
#include <err.h>
#include <errno.h>
#include <fcntl.h>
#include <mqueue.h>
#include <signal.h>
#include <stdio.h>
#define MQNAME "/mytstqueue1"
/*
 * Basic POSIX message queue sanity test: create a queue, verify its
 * attributes survive the round trip, and exercise the mq_notify
 * registration rules (only one registration at a time; NULL clears it).
 */
int main()
{
	struct mq_attr attr, attr2;
	struct sigevent sigev;
	int mq;
	int status;

	/* Create the queue with explicit capacity and message size. */
	attr.mq_maxmsg = 2;
	attr.mq_msgsize = 100;
	mq = mq_open(MQNAME, O_CREAT | O_RDWR | O_EXCL, 0666, &attr);
	if (mq == -1)
		err(1, "mq_open");
	/* Unlink immediately; the descriptor stays usable until mq_close(). */
	status = mq_unlink(MQNAME);
	if (status)
		err(1, "mq_unlink");
	status = mq_getattr(mq, &attr2);
	if (status)
		err(1, "mq_getattr");
	/* The attributes supplied at creation must be preserved. */
	if (attr.mq_maxmsg != attr2.mq_maxmsg)
		err(1, "mq_maxmsg changed");
	if (attr.mq_msgsize != attr2.mq_msgsize)
		err(1, "mq_msgsize changed");
	/*
	 * First notify registration must succeed, a second must fail with
	 * EBUSY, and passing NULL must deregister cleanly.
	 */
	sigev.sigev_notify = SIGEV_SIGNAL;
	sigev.sigev_signo = SIGRTMIN;
	status = mq_notify(mq, &sigev);
	if (status)
		err(1, "mq_notify");
	status = mq_notify(mq, &sigev);
	if (status == 0)
		err(1, "mq_notify 2");
	else if (errno != EBUSY)
		err(1, "mq_notify 3");
	status = mq_notify(mq, NULL);
	if (status)
		err(1, "mq_notify 4");
	status = mq_close(mq);
	if (status)
		err(1, "mq_close");
	return (0);
}
|
Lavish883/Loki-Stream | node_modules/@expo/config-plugins/build/utils/modules.js | version https://git-lfs.github.com/spec/v1
oid sha256:da8d72ea694d0371e8ada19d8b30f1bde46717e2c01834623ee3010ad03a87e6
size 1030
|
peurpdapeurp/ndnrtc | cpp/tests/test-audio-playout.cc | <filename>cpp/tests/test-audio-playout.cc
//
// test-audio-playout.cc
//
// Created by <NAME> on 18 May 2016.
// Copyright 2013-2016 Regents of the University of California
//
#include <stdlib.h>
#include "gtest/gtest.h"
#include "tests-helpers.hpp"
#include "frame-data.hpp"
#include "src/audio-playout.hpp"
#include "audio-thread.hpp"
#include "clock.hpp"
#include "include/params.hpp"
#include "audio-capturer.hpp"
#include "statistics.hpp"
#include "mock-objects/audio-thread-callback-mock.hpp"
#include "mock-objects/buffer-observer-mock.hpp"
#include "mock-objects/playback-queue-observer-mock.hpp"
#include "mock-objects/playout-observer-mock.hpp"
// #define ENABLE_LOGGING
using namespace testing;
using namespace ndnrtc;
using namespace ndn;
using namespace boost::chrono;
using namespace ndnrtc::statistics;
// End-to-end test of the audio pipeline with the G.722 codec: capture ->
// bundle -> simulated NDN network (delay queue + data cache) -> buffer /
// playback queue -> playout. Verifies the playback queue stays near its
// target size and drains cleanly on stop.
TEST(TestAudioPlayout, TestG722)
{
#ifdef ENABLE_LOGGING
    ndnlog::new_api::Logger::initAsyncLogging();
    ndnlog::new_api::Logger::getLogger("").setLogLevel(ndnlog::NdnLoggerDetailLevelAll);
#endif
    // A dedicated io_service thread drives timers, the delay queue and playout.
    boost::asio::io_service io;
    boost::shared_ptr<boost::asio::io_service::work> work(boost::make_shared<boost::asio::io_service::work>(io));
    boost::thread t([&io](){
        io.run();
    });
    boost::asio::deadline_timer runTimer(io);

    // Timing model: the playback-queue target is one-way delay plus jitter,
    // and the interest pipeline is sized to keep that much audio in flight.
    int runTime = 5000;
    int oneWayDelay = 100; // milliseconds
    int deviation = 50;
    int targetSize = oneWayDelay+deviation;
    double captureFps = 40;
    int samplePeriod = (int)(1000./captureFps);
    int pipeline = 2*round((double)targetSize/(double)samplePeriod)+1;

    // Producer side: audio capture thread that emits G.722 sample bundles.
    MockAudioThreadCallback callback;
    AudioThreadParams ap("hd", "g722");
    AudioCaptureParams acp;
    acp.deviceId_ = 0;
    int wire_length = 1000;
    boost::shared_ptr<AudioBundlePacket> bundle(boost::make_shared<AudioBundlePacket>(wire_length));
    AudioThread at(ap, acp, &callback, wire_length);
    int nBundles = 0;
    uint64_t bundleNo = 0;
    high_resolution_clock::time_point callbackTs;
    // Simulated network: jittered delivery delay plus an in-memory data cache.
    DelayQueue queue(io, oneWayDelay, deviation);
    DataCache cache;
    std::string streamPrefix = "/ndn/edu/ucla/remap/peter/ndncon/instance1/ndnrtc/%FD%02/audio/mic";
    std::string threadPrefix = "/ndn/edu/ucla/remap/peter/ndncon/instance1/ndnrtc/%FD%02/audio/mic/hd";

    // Consumer side: frame buffer, playback queue and the playout under test.
    MockBufferObserver bobserver;
    MockPlaybackQueueObserver pobserver;
    boost::shared_ptr<SlotPool> pool(boost::make_shared<SlotPool>(pipeline*5)); // make sure we re-use slots
    boost::shared_ptr<StatisticsStorage> storage(StatisticsStorage::createConsumerStatistics());
    boost::shared_ptr<Buffer> buffer(boost::make_shared<Buffer>(storage, pool));
    boost::shared_ptr<PlaybackQueue> pqueue(boost::make_shared<PlaybackQueue>(Name(streamPrefix), buffer));
    MockPlayoutObserver playoutObserver;
    AudioPlayout playout(io, pqueue);

#ifdef ENABLE_LOGGING
    playout.setLogger(&ndnlog::new_api::Logger::getLogger(""));
    at.setLogger(&ndnlog::new_api::Logger::getLogger(""));
#endif

    // buffer->attach(&bobserver);
    pqueue->attach(&pobserver);
    playout.attach(&playoutObserver);

#ifdef ENABLE_LOGGING
    buffer->setDescription("buffer");
    buffer->setLogger(&ndnlog::new_api::Logger::getLogger(""));
    pqueue->setDescription("pqueue");
    pqueue->setLogger(&ndnlog::new_api::Logger::getLogger(""));
#endif

    // "Publisher": every captured bundle is segmented and stored in the data
    // cache under sequentially numbered names, as a producer would publish it.
    boost::function<void(std::string, uint64_t, boost::shared_ptr<AudioBundlePacket>)> onBundle = [&callbackTs, wire_length,
    &bundle, &nBundles, &bundleNo, &cache, &queue, threadPrefix](std::string, uint64_t n, boost::shared_ptr<AudioBundlePacket> b){
        bundle->swap(*b);
        CommonHeader hdr;
        hdr.sampleRate_ = 25.;
        hdr.publishTimestampMs_ = clock::millisecondTimestamp();
        hdr.publishUnixTimestamp_ = clock::unixTimestamp();
        bundle->setHeader(hdr);

        std::vector<CommonSegment> segments = CommonSegment::slice(*bundle, 1000);
        int idx = 0;
        for (auto& s:segments)
        {
            boost::shared_ptr<NetworkData> segmentData = s.getNetworkData();
            Name segmentName(threadPrefix);
            segmentName.appendSequenceNumber(nBundles).appendSegment(idx);
            boost::shared_ptr<ndn::Data> d(boost::make_shared<ndn::Data>(segmentName));
            d->getMetaInfo().setFreshnessPeriod(1000);
            d->getMetaInfo().setFinalBlockId(ndn::Name::Component::fromSegment(segments.size()-1));
            d->setContent(segmentData->getData(), segmentData->getLength());
            cache.addData(d);
            idx++;
#ifdef ENABLE_LOGGING
            LogDebug("") << "published " << d->getName() << std::endl;
#endif
        }
        nBundles++;
    };
    EXPECT_CALL(callback, onSampleBundle("hd",_, _))
        .Times(AtLeast(1))
        .WillRepeatedly(Invoke(onBundle));

    // "Consumer": expresses an interest for one bundle; the matching data is
    // delivered back through the delay queue and handed to the buffer.
    boost::function<void(boost::shared_ptr<WireData<DataSegmentHeader>>)> onDataArrived;
    boost::function<void(PacketNumber pno)> requestFrame = [&queue, &cache, buffer,
    &onDataArrived, threadPrefix]
    (PacketNumber pno)
    {
        Name frameName(threadPrefix);
        frameName.appendSequenceNumber(pno).appendSegment(0);
        boost::shared_ptr<ndn::Interest> i(boost::make_shared<ndn::Interest>(frameName,1000));
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
        i->setNonce(Blob((uint8_t*)&pno, sizeof(PacketNumber)));
#pragma GCC diagnostic pop
        std::vector<boost::shared_ptr<ndn::Interest>> interests;
        interests.push_back(i);
        buffer->requested(makeInterestsConst(interests));
#ifdef ENABLE_LOGGING
        LogDebug("") << "express " << i->getName() << std::endl;
#endif
        queue.push([i, &cache, &queue, buffer, &onDataArrived](){
            cache.addInterest(i, [&queue, buffer, &onDataArrived](const boost::shared_ptr<ndn::Data>& d, const boost::shared_ptr<ndn::Interest> i){
                queue.push([buffer, &onDataArrived, d,i ](){
#ifdef ENABLE_LOGGING
                    LogDebug("") << "received " << d->getName() << std::endl;
#endif
                    boost::shared_ptr<WireData<DataSegmentHeader>> data =
                        boost::make_shared<WireData<DataSegmentHeader>>(d, i);
                    onDataArrived(data);
                    BufferReceipt r = buffer->received(data);
                });
            });
        });
    };
    // Each arrival triggers the next request, keeping the pipeline full.
    int nRequested = 0;
    onDataArrived = [&requestFrame, &nRequested]
    (boost::shared_ptr<WireData<DataSegmentHeader>> data)
    {
        requestFrame(nRequested);
        nRequested++;
    };

    // Prime the pipeline with the initial window of interests.
    for (nRequested = 0; nRequested < pipeline; ++nRequested)
        requestFrame(nRequested);

    // Start playout once the queue reaches the target size; track the average
    // queue size for the report below.
    int queuSizeAccum = 0;
    int nQueueSize = 0;
    boost::atomic<bool> done(false);
    EXPECT_CALL(pobserver, onNewSampleReady())
        .Times(AtLeast(1))
        .WillRepeatedly(Invoke([&done, &playout, targetSize, pqueue,
            &queuSizeAccum, &nQueueSize]()
            {
                if (!done && pqueue->size() >= targetSize && !playout.isRunning())
                    playout.start();
                queuSizeAccum += pqueue->size();
                nQueueSize++;
            }));

    // Count starvation events while running (ideally zero).
    int drainCount = 0;
    EXPECT_CALL(playoutObserver, onQueueEmpty())
        .Times(AtLeast(0))
        .WillRepeatedly(Invoke([&drainCount](){
            drainCount++;
        }));

    at.start();

    runTimer.expires_from_now(boost::posix_time::milliseconds(runTime));
    runTimer.wait();

    // After capture stops, the queue must drain exactly once; that final
    // drain stops playout and releases the io_service worker.
    EXPECT_CALL(playoutObserver, onQueueEmpty())
        .Times(1)
        .WillOnce(Invoke([&playout, &done, &t, &work](){
            done = true;
            playout.stop();
            work.reset();
        }));

    at.stop();
    queue.reset();
    t.join();

#ifdef ENABLE_LOGGING
    ndnlog::new_api::Logger::releaseAsyncLogging();
#endif

    GT_PRINTF("Queue drain count %d, Avg play queue size: %.2fms (target %dms)\n",
        drainCount, (double)queuSizeAccum/(double)nQueueSize, targetSize);
}
// Same end-to-end pipeline as TestG722 above, but with the Opus codec and the
// AudioPlayout constructor that takes explicit statistics storage and codec.
TEST(TestAudioPlayout, TestOpus)
{
#ifdef ENABLE_LOGGING
    ndnlog::new_api::Logger::initAsyncLogging();
    ndnlog::new_api::Logger::getLogger("").setLogLevel(ndnlog::NdnLoggerDetailLevelDebug);
#endif
    // io_service worker thread for timers, delay queue and playout.
    boost::asio::io_service io;
    boost::shared_ptr<boost::asio::io_service::work> work(boost::make_shared<boost::asio::io_service::work>(io));
    boost::thread t([&io](){
        io.run();
    });
    boost::asio::deadline_timer runTimer(io);

    // Timing model: target queue size = one-way delay + jitter; pipeline
    // sized to keep that much audio in flight.
    int runTime = 5000;
    int oneWayDelay = 100; // milliseconds
    int deviation = 50;
    int targetSize = oneWayDelay+deviation;
    double captureFps = 40;
    int samplePeriod = (int)(1000./captureFps);
    int pipeline = 2*round((double)targetSize/(double)samplePeriod)+1;

    // Producer side: capture thread configured for Opus.
    MockAudioThreadCallback callback;
    AudioThreadParams ap("hd", "opus");
    AudioCaptureParams acp;
    acp.deviceId_ = 0;
    int wire_length = 1000;
    boost::shared_ptr<AudioBundlePacket> bundle(boost::make_shared<AudioBundlePacket>(wire_length));
    AudioThread at(ap, acp, &callback, wire_length);
    int nBundles = 0;
    uint64_t bundleNo = 0;
    high_resolution_clock::time_point callbackTs;
    // Simulated network and in-memory producer cache.
    DelayQueue queue(io, oneWayDelay, deviation);
    DataCache cache;
    std::string streamPrefix = "/ndn/edu/ucla/remap/peter/ndncon/instance1/ndnrtc/%FD%02/audio/mic";
    std::string threadPrefix = "/ndn/edu/ucla/remap/peter/ndncon/instance1/ndnrtc/%FD%02/audio/mic/hd";

    // Consumer side: buffer, playback queue, and the playout under test.
    MockBufferObserver bobserver;
    MockPlaybackQueueObserver pobserver;
    boost::shared_ptr<SlotPool> pool(boost::make_shared<SlotPool>(pipeline*5)); // make sure we re-use slots
    boost::shared_ptr<StatisticsStorage> storage(StatisticsStorage::createConsumerStatistics());
    boost::shared_ptr<Buffer> buffer(boost::make_shared<Buffer>(storage, pool));
    boost::shared_ptr<PlaybackQueue> pqueue(boost::make_shared<PlaybackQueue>(Name(streamPrefix), buffer));
    MockPlayoutObserver playoutObserver;
    AudioPlayout playout(io, pqueue, boost::shared_ptr<StatStorage>(StatStorage::createConsumerStatistics()),
        WebrtcAudioChannel::Codec::Opus, 0);

#ifdef ENABLE_LOGGING
    playout.setLogger(&ndnlog::new_api::Logger::getLogger(""));
#endif

    // buffer->attach(&bobserver);
    pqueue->attach(&pobserver);
    playout.attach(&playoutObserver);

#ifdef ENABLE_LOGGING
    buffer->setDescription("buffer");
    buffer->setLogger(&ndnlog::new_api::Logger::getLogger(""));
    pqueue->setDescription("pqueue");
    pqueue->setLogger(&ndnlog::new_api::Logger::getLogger(""));
#endif

    // "Publisher": segment each captured bundle and store it in the cache
    // under sequentially numbered names.
    boost::function<void(std::string, uint64_t, boost::shared_ptr<AudioBundlePacket>)> onBundle = [&callbackTs, wire_length,
    &bundle, &nBundles, &bundleNo, &cache, &queue, threadPrefix](std::string, uint64_t n, boost::shared_ptr<AudioBundlePacket> b){
        bundle->swap(*b);
        CommonHeader hdr;
        hdr.sampleRate_ = 25.;
        hdr.publishTimestampMs_ = clock::millisecondTimestamp();
        hdr.publishUnixTimestamp_ = clock::unixTimestamp();
        bundle->setHeader(hdr);

        std::vector<CommonSegment> segments = CommonSegment::slice(*bundle, 1000);
        int idx = 0;
        for (auto& s:segments)
        {
            boost::shared_ptr<NetworkData> segmentData = s.getNetworkData();
            Name segmentName(threadPrefix);
            segmentName.appendSequenceNumber(nBundles).appendSegment(idx);
            boost::shared_ptr<ndn::Data> d(boost::make_shared<ndn::Data>(segmentName));
            d->getMetaInfo().setFreshnessPeriod(1000);
            d->getMetaInfo().setFinalBlockId(ndn::Name::Component::fromSegment(segments.size()-1));
            d->setContent(segmentData->getData(), segmentData->getLength());
            cache.addData(d);
            idx++;
#ifdef ENABLE_LOGGING
            LogDebug("") << "published " << d->getName() << std::endl;
#endif
        }
        nBundles++;
    };
    EXPECT_CALL(callback, onSampleBundle("hd",_, _))
        .Times(AtLeast(1))
        .WillRepeatedly(Invoke(onBundle));

    // "Consumer": express one interest per bundle; data flows back through
    // the delay queue into the buffer.
    boost::function<void(boost::shared_ptr<WireData<DataSegmentHeader>>)> onDataArrived;
    boost::function<void(PacketNumber pno)> requestFrame = [&queue, &cache, buffer,
    &onDataArrived, threadPrefix]
    (PacketNumber pno)
    {
        Name frameName(threadPrefix);
        frameName.appendSequenceNumber(pno).appendSegment(0);
        boost::shared_ptr<ndn::Interest> i(boost::make_shared<ndn::Interest>(frameName,1000));
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
        i->setNonce(Blob((uint8_t*)&pno, sizeof(PacketNumber)));
#pragma GCC diagnostic pop
        std::vector<boost::shared_ptr<ndn::Interest>> interests;
        interests.push_back(i);
        buffer->requested(makeInterestsConst(interests));
#ifdef ENABLE_LOGGING
        LogDebug("") << "express " << i->getName() << std::endl;
#endif
        queue.push([i, &cache, &queue, buffer, &onDataArrived](){
            cache.addInterest(i, [&queue, buffer, &onDataArrived](const boost::shared_ptr<ndn::Data>& d, const boost::shared_ptr<ndn::Interest> i){
                queue.push([buffer, &onDataArrived, d, i](){
#ifdef ENABLE_LOGGING
                    LogDebug("") << "received " << d->getName() << std::endl;
#endif
                    boost::shared_ptr<WireData<DataSegmentHeader>> data =
                        boost::make_shared<WireData<DataSegmentHeader>>(d, i);
                    onDataArrived(data);
                    BufferReceipt r = buffer->received(data);
                });
            });
        });
    };
    // Each arrival requests the next bundle, keeping the pipeline full.
    int nRequested = 0;
    onDataArrived = [&requestFrame, &nRequested]
    (boost::shared_ptr<WireData<DataSegmentHeader>> data)
    {
        requestFrame(nRequested);
        nRequested++;
    };

    // Prime the pipeline.
    for (nRequested = 0; nRequested < pipeline; ++nRequested)
        requestFrame(nRequested);

    // Start playout once the queue reaches the target size; accumulate queue
    // sizes for the report below.
    int queuSizeAccum = 0;
    int nQueueSize = 0;
    boost::atomic<bool> done(false);
    EXPECT_CALL(pobserver, onNewSampleReady())
        .Times(AtLeast(1))
        .WillRepeatedly(Invoke([&done, &playout, targetSize, pqueue,
            &queuSizeAccum, &nQueueSize]()
            {
                if (!done && pqueue->size() >= targetSize && !playout.isRunning())
                    playout.start();
                queuSizeAccum += pqueue->size();
                nQueueSize++;
            }));

    // Count starvation events while running (ideally zero).
    int drainCount = 0;
    EXPECT_CALL(playoutObserver, onQueueEmpty())
        .Times(AtLeast(0))
        .WillRepeatedly(Invoke([&drainCount](){
            drainCount++;
        }));

    at.start();

    runTimer.expires_from_now(boost::posix_time::milliseconds(runTime));
    runTimer.wait();

    // After capture stops, expect exactly one final drain that stops playout
    // and releases the io_service worker.
    EXPECT_CALL(playoutObserver, onQueueEmpty())
        .Times(1)
        .WillOnce(Invoke([&playout, &done, &t, &work](){
            done = true;
            playout.stop();
            work.reset();
        }));

    at.stop();
    queue.reset();
    t.join();

    GT_PRINTF("Queue drain count %d, Avg play queue size: %.2fms (target %dms)\n",
        drainCount, (double)queuSizeAccum/(double)nQueueSize, targetSize);
}
// Standard GoogleTest entry point: runs every TEST in this binary.
int main(int argc, char **argv) {
    ::testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}
|
dawmlight/tools_oat | src/main/java/ohos/oat/analysis/OatMainAnalyser.java | /*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ChangeLog:
* 2021.2 - Change the file analyse logic to OhosProcessor
* Modified by jalenchen
*/
package ohos.oat.analysis;
import static org.apache.rat.api.MetaData.RAT_URL_DOCUMENT_CATEGORY;
import ohos.oat.config.OatConfig;
import ohos.oat.document.OatFileDocument;
import ohos.oat.utils.OatFileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.rat.analysis.RatHeaderAnalysisException;
import org.apache.rat.api.Document;
import org.apache.rat.api.MetaData;
import org.apache.rat.document.IDocumentAnalyser;
import org.apache.rat.document.RatDocumentAnalysisException;
import java.io.IOException;
import java.io.Reader;
/**
* Main analyser of oat
*
* @author chenyaxun
* @since 1.0
*/
public class OatMainAnalyser implements IDocumentAnalyser {
    // Global OAT configuration driving the per-document checks.
    private final OatConfig oatConfig;

    public OatMainAnalyser(final OatConfig oatConfig) {
        super();
        this.oatConfig = oatConfig;
    }

    /**
     * Analyses one document and records its category in the metadata.
     * Archive and binary files are skipped entirely (no category is set);
     * directories are tagged as such; every other file is treated as a
     * standard document whose header is scanned by {@link OatProcessor}.
     *
     * @param subject document to analyse; ignored unless it is an
     *                {@link OatFileDocument}
     * @throws RatDocumentAnalysisException if the file cannot be read or its
     *                                      header cannot be analysed
     */
    @Override
    public void analyse(final Document subject) throws RatDocumentAnalysisException {
        OatFileDocument document = null;
        if (subject instanceof OatFileDocument) {
            document = (OatFileDocument) subject;
        }
        if (null == document) {
            return;
        }
        final MetaData.Datum documentCategory;
        if (OatFileUtils.isArchiveFile(subject) || OatFileUtils.isBinaryFile(subject)) {
            // Archives and binaries carry no license header to scan.
            return;
        } else {
            if (document.isDirectory()) {
                documentCategory = new MetaData.Datum(RAT_URL_DOCUMENT_CATEGORY, "Directory");
            } else {
                documentCategory = MetaData.RAT_DOCUMENT_CATEGORY_DATUM_STANDARD;
                Reader reader = null;
                try {
                    reader = subject.reader();
                    final OatProcessor worker = new OatProcessor(reader, subject, this.oatConfig);
                    worker.read();
                } catch (final IOException e) {
                    throw new RatDocumentAnalysisException("Cannot read header", e);
                } catch (final RatHeaderAnalysisException e) {
                    throw new RatDocumentAnalysisException("Cannot analyse header", e);
                } finally {
                    // Close failures are deliberately swallowed here.
                    IOUtils.closeQuietly(reader);
                }
            }
        }
        subject.getMetaData().set(documentCategory);
    }
}
|
bocke/ucc | test/cases/inline/inline_stret.c | <gh_stars>10-100
// RUN: %ocheck 10 %s -fno-semantic-interposition
void abort(void) __attribute__((noreturn));
typedef struct A { long i, j, k; } A;
/* Returns an A by value (struct-return / sret path); forced inline so the
 * test exercises inlining of functions returning large structs. */
__attribute((always_inline))
inline A f(int k, int cond)
{
	if(cond){
		A local = { .i = 99, .k = k };
		return local;
	}
	return (A){ .i = 3, .k = k };
}
/* K&R-style main() (implicit int) is intentional in this compiler test;
 * the expected exit status (10) is checked by the RUN line above. */
main()
{
#include "../ocheck-init.c"
	if(f(1, 1).i != 99)
		abort();
	if(f(1, 1).i + f(5, 1).k != 104)
		abort();

	// 3 + 7
	return f(2941, 0).i + f(7, 0).k;
}
|
LazarJovic/literary-association | Bitcoin-Payment-Service/src/main/java/goveed20/BitcoinPaymentService/BitcoinPaymentServiceApplication.java | <reponame>LazarJovic/literary-association<gh_stars>0
package goveed20.BitcoinPaymentService;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.scheduling.annotation.EnableAsync;
/**
 * Entry point for the Bitcoin payment micro-service: a Spring Boot
 * application registered with service discovery, with Feign clients and
 * async execution enabled. Component scanning also covers the shared
 * payment-concentrator plugin package.
 */
@SpringBootApplication(scanBasePackages = {"goveed20.PaymentConcentrator.payment.concentrator.plugin", "goveed20.BitcoinPaymentService"})
@EnableDiscoveryClient
@EnableFeignClients(basePackages = {"goveed20.PaymentConcentrator.payment.concentrator.plugin"})
@EnableAsync
public class BitcoinPaymentServiceApplication {

    public static void main(String[] args) {
        SpringApplication.run(BitcoinPaymentServiceApplication.class, args);
    }
}
|
uxDaniel/homebrew-fonts | Casks/font-gandom.rb | cask 'font-gandom' do
version '0.3'
sha256 '6a0084ffe9a57744e4c17f9a80c6417e42c3ff28c01bfbec8d399ea2683c4a4b'
url "https://github.com/rastikerdar/gandom-font/releases/download/v#{version}/gandom-font-v#{version}.zip"
appcast 'https://github.com/rastikerdar/gandom-font/releases.atom',
checkpoint: '644c9b56d1a64cc7c0569740b8f209cc59d06a3dc21e2e5bba84e9cf79660bab'
name 'Gandom'
homepage 'http://rastikerdar.github.io/gandom-font'
license :ofl
font 'Gandom.ttf'
font 'Gandom-Bold.ttf'
end
|
jingshanccc/course | file/proto/file/file.pb.micro.go | // Code generated by protoc-gen-micro. DO NOT EDIT.
// source: gitee.com/jingshanccc/course/file/proto/file/file.proto
package file
import (
fmt "fmt"
dto "gitee.com/jingshanccc/course/file/proto/dto"
basic "gitee.com/jingshanccc/course/public/proto/basic"
proto "github.com/golang/protobuf/proto"
math "math"
)
import (
context "context"
api "github.com/micro/go-micro/v2/api"
client "github.com/micro/go-micro/v2/client"
server "github.com/micro/go-micro/v2/server"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// Reference imports to suppress errors if they are not otherwise used.
var _ api.Endpoint
var _ context.Context
var _ client.Option
var _ server.Option
// Api Endpoints for FileService service
// NewFileServiceEndpoints lists the API endpoints registered for the
// FileService service (none are declared in the proto file).
// NOTE: this file is generated by protoc-gen-micro; comments here will be
// lost on regeneration.
func NewFileServiceEndpoints() []*api.Endpoint {
	return []*api.Endpoint{}
}

// Client API for FileService service

// FileService is the client-side interface of the file RPC service: simple
// upload, a pre-upload check, and a sharded upload flow
// (VerifyUpload / UploadShard / Merge / Cancel).
type FileService interface {
	Upload(ctx context.Context, in *dto.FileDto, opts ...client.CallOption) (*dto.FileDto, error)
	Check(ctx context.Context, in *basic.String, opts ...client.CallOption) (*dto.FileDto, error)
	VerifyUpload(ctx context.Context, in *basic.String, opts ...client.CallOption) (*dto.VerifyRes, error)
	UploadShard(ctx context.Context, in *dto.FileShardDto, opts ...client.CallOption) (*basic.Boolean, error)
	Merge(ctx context.Context, in *dto.FileDto, opts ...client.CallOption) (*dto.FileDto, error)
	Cancel(ctx context.Context, in *basic.String, opts ...client.CallOption) (*basic.String, error)
}

// fileService is the generated go-micro client implementation of FileService.
type fileService struct {
	c client.Client
	name string
}

// NewFileService returns a FileService client bound to the given service
// name and go-micro client.
func NewFileService(name string, c client.Client) FileService {
	return &fileService{
		c: c,
		name: name,
	}
}
// Each client method below builds a go-micro request for the corresponding
// FileService RPC, performs the call, and returns the decoded response.

// Upload performs the FileService.Upload RPC.
func (c *fileService) Upload(ctx context.Context, in *dto.FileDto, opts ...client.CallOption) (*dto.FileDto, error) {
	req := c.c.NewRequest(c.name, "FileService.Upload", in)
	out := new(dto.FileDto)
	err := c.c.Call(ctx, req, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Check performs the FileService.Check RPC.
func (c *fileService) Check(ctx context.Context, in *basic.String, opts ...client.CallOption) (*dto.FileDto, error) {
	req := c.c.NewRequest(c.name, "FileService.Check", in)
	out := new(dto.FileDto)
	err := c.c.Call(ctx, req, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// VerifyUpload performs the FileService.VerifyUpload RPC.
func (c *fileService) VerifyUpload(ctx context.Context, in *basic.String, opts ...client.CallOption) (*dto.VerifyRes, error) {
	req := c.c.NewRequest(c.name, "FileService.VerifyUpload", in)
	out := new(dto.VerifyRes)
	err := c.c.Call(ctx, req, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// UploadShard performs the FileService.UploadShard RPC.
func (c *fileService) UploadShard(ctx context.Context, in *dto.FileShardDto, opts ...client.CallOption) (*basic.Boolean, error) {
	req := c.c.NewRequest(c.name, "FileService.UploadShard", in)
	out := new(basic.Boolean)
	err := c.c.Call(ctx, req, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Merge performs the FileService.Merge RPC.
func (c *fileService) Merge(ctx context.Context, in *dto.FileDto, opts ...client.CallOption) (*dto.FileDto, error) {
	req := c.c.NewRequest(c.name, "FileService.Merge", in)
	out := new(dto.FileDto)
	err := c.c.Call(ctx, req, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// Cancel performs the FileService.Cancel RPC.
func (c *fileService) Cancel(ctx context.Context, in *basic.String, opts ...client.CallOption) (*basic.String, error) {
	req := c.c.NewRequest(c.name, "FileService.Cancel", in)
	out := new(basic.String)
	err := c.c.Call(ctx, req, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// Server API for FileService service
type FileServiceHandler interface {
Upload(context.Context, *dto.FileDto, *dto.FileDto) error
Check(context.Context, *basic.String, *dto.FileDto) error
VerifyUpload(context.Context, *basic.String, *dto.VerifyRes) error
UploadShard(context.Context, *dto.FileShardDto, *basic.Boolean) error
Merge(context.Context, *dto.FileDto, *dto.FileDto) error
Cancel(context.Context, *basic.String, *basic.String) error
}
func RegisterFileServiceHandler(s server.Server, hdlr FileServiceHandler, opts ...server.HandlerOption) error {
type fileService interface {
Upload(ctx context.Context, in *dto.FileDto, out *dto.FileDto) error
Check(ctx context.Context, in *basic.String, out *dto.FileDto) error
VerifyUpload(ctx context.Context, in *basic.String, out *dto.VerifyRes) error
UploadShard(ctx context.Context, in *dto.FileShardDto, out *basic.Boolean) error
Merge(ctx context.Context, in *dto.FileDto, out *dto.FileDto) error
Cancel(ctx context.Context, in *basic.String, out *basic.String) error
}
type FileService struct {
fileService
}
h := &fileServiceHandler{hdlr}
return s.Handle(s.NewHandler(&FileService{h}, opts...))
}
// fileServiceHandler adapts a user-supplied FileServiceHandler to the shape
// expected by the server; each method is a thin delegation.
type fileServiceHandler struct {
	FileServiceHandler
}

func (h *fileServiceHandler) Upload(ctx context.Context, in *dto.FileDto, out *dto.FileDto) error {
	return h.FileServiceHandler.Upload(ctx, in, out)
}

func (h *fileServiceHandler) Check(ctx context.Context, in *basic.String, out *dto.FileDto) error {
	return h.FileServiceHandler.Check(ctx, in, out)
}

func (h *fileServiceHandler) VerifyUpload(ctx context.Context, in *basic.String, out *dto.VerifyRes) error {
	return h.FileServiceHandler.VerifyUpload(ctx, in, out)
}

func (h *fileServiceHandler) UploadShard(ctx context.Context, in *dto.FileShardDto, out *basic.Boolean) error {
	return h.FileServiceHandler.UploadShard(ctx, in, out)
}

func (h *fileServiceHandler) Merge(ctx context.Context, in *dto.FileDto, out *dto.FileDto) error {
	return h.FileServiceHandler.Merge(ctx, in, out)
}

func (h *fileServiceHandler) Cancel(ctx context.Context, in *basic.String, out *basic.String) error {
	return h.FileServiceHandler.Cancel(ctx, in, out)
}
|
hmrc/residence-nil-rate-band-calculator-frontend | app/uk/gov/hmrc/residencenilratebandcalculator/json/JsonErrorProcessor.scala | /*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.residencenilratebandcalculator.json
import play.api.libs.json.{JsPath, JsonValidationError}
object JsonErrorProcessor {

  /** Concatenate all messages of one validation error. */
  private def validationErrorToString(v: JsonValidationError): String =
    v.messages.mkString

  /** Render one (path, errors) pair as a single "JSON error: ..." line. */
  private def errorTupleToString(t: (JsPath, Seq[JsonValidationError])): String =
    "JSON error: " + t._2.map(validationErrorToString).mkString + "\n"

  /** Render a full sequence of Play JSON validation errors, one per line. */
  def apply(errs: Seq[(JsPath, Seq[JsonValidationError])]): String =
    errs.map(errorTupleToString).mkString
}
|
mario2904/ICOM5016-Project | app/components/administrator.js | import React, { Component } from 'react';
import { Link } from 'react-router';
import { Header, List, Icon, Button } from 'semantic-ui-react';
import AdministratorTableAssociations from './administrator-table-associations';
// Top-level dashboard page for administrators: a static list of navigation
// entries linking to the association, student, and event management views.
export default class Administrator extends Component {
  // Pure presentational render; no props or state are read.
  render () {
    return (
      <div>
        <Header size='huge'>Administrator Dashboard</Header>
        <List celled>
          <List.Item>
            <List.Content floated='right'>
              <Button as={Link} to='/administrator/associations'>View All</Button>
            </List.Content>
            <Header as='h3'>
              <Icon name='university'/>
              {' '} Associations
            </Header>
          </List.Item>
          <List.Item>
            <List.Content floated='right'>
              <Button as={Link} to='/administrator/students'>View All</Button>
            </List.Content>
            <Header as='h3'>
              <Icon name='user'/>
              {' '} Students
            </Header>
          </List.Item>
          <List.Item>
            <List.Content floated='right'>
              <Button as={Link} to='/administrator/events'>View All</Button>
            </List.Content>
            <Header as='h3'>
              <Icon name='calendar'/>
              {' '} Events
            </Header>
          </List.Item>
        </List>
      </div>
    );
  }
}
|
best08618/asylo | gcc-gcc-7_3_0-release/gcc/testsuite/gcc.dg/pr81650.c | /* PR driver/81650 */
/* { dg-do compile } */
/* { dg-options "-Walloc-size-larger-than=9223372036854775807" } */
/* Allocates a small heap buffer; the test only checks that the huge
   -Walloc-size-larger-than= threshold above is accepted without error. */
void *
foo (void)
{
  return __builtin_malloc (5);
}
|
jce-caba/GtkQR | include/QrcalculateMask.h | #ifndef QRCALCULATEMASK_H_INCLUDED
#define QRCALCULATEMASK_H_INCLUDED
#include <QrDefinitions.h>
long getpointMask(char **,QR_Data *);
long getpointMask_micro_QR(char **,QR_Data *);
#endif // QRCALCULATEMASK_H_INCLUDED
|
opengauss-mirror/CM | src/cm_server/cms_barrier_check.cpp | /*
* Copyright (c) 2021 Huawei Technologies Co.,Ltd.
*
* CM is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
* See the Mulan PSL v2 for more details.
* -------------------------------------------------------------------------
*
* cms_barrier_check.cpp
* barries functions
*
* IDENTIFICATION
* src/cm_server/cms_barrier_check.cpp
*
* -------------------------------------------------------------------------
*/
#include "cm/elog.h"
#include "cms_alarm.h"
#include "cms_global_params.h"
#include "cms_barrier_check.h"
#include "cms_ddb_adapter.h"
#define IS_MAJORITY(sum, alive) (((sum) != 0) && (2 * (alive) > (sum)))
#define HALF_COUNT(count) ((count) / 2)
static uint32 g_cnMajorityNum;
static uint32 g_cnCount;
static struct timespec g_lastTime;
static struct timespec g_curTime;
/*
 * Decide whether a CN instance counts as alive for barrier computation.
 * Alive means: instance role is NORMAL, and the db state is NORMAL,
 * WAITING, or NEED_REPAIR caused only by a connect-type build reason.
 */
static bool IsRoleCnAlive(const cm_instance_report_status *instanceStatus)
{
    const auto &cn = instanceStatus->coordinatemember;
    if (cn.status.status != INSTANCE_ROLE_NORMAL) {
        return false;
    }
    const auto dbState = cn.status.db_state;
    if (dbState == INSTANCE_HA_STATE_NORMAL || dbState == INSTANCE_HA_STATE_WAITING) {
        return true;
    }
    /* NEED_REPAIR is tolerated only while the CN is merely (re)connecting */
    return (dbState == INSTANCE_HA_STATE_NEED_REPAIR &&
        (cn.buildReason == INSTANCE_HA_DATANODE_BUILD_REASON_DISCONNECT ||
        cn.buildReason == INSTANCE_HA_DATANODE_BUILD_REASON_CONNECTING));
}
/*
 * Decide whether a DN replica counts as alive for barrier computation.
 * The replica must be PRIMARY or STANDBY, and its db state must be
 * NORMAL, CATCH_UP, or NEED_REPAIR with a connect-type build reason.
 */
static bool IsRoleDnAlive(const cm_local_replconninfo *localStatus)
{
    const bool roleOk = (localStatus->local_role == INSTANCE_ROLE_PRIMARY ||
        localStatus->local_role == INSTANCE_ROLE_STANDBY);
    if (!roleOk) {
        return false;
    }
    switch (localStatus->db_state) {
        case INSTANCE_HA_STATE_NORMAL:
        case INSTANCE_HA_STATE_CATCH_UP:
            return true;
        case INSTANCE_HA_STATE_NEED_REPAIR:
            /* tolerated only while the DN is merely (re)connecting */
            return (localStatus->buildReason == INSTANCE_HA_DATANODE_BUILD_REASON_DISCONNECT ||
                localStatus->buildReason == INSTANCE_HA_DATANODE_BUILD_REASON_CONNECTING);
        default:
            return false;
    }
}
static void InitCnMajorityNum(void)
{
g_cnCount = 0;
(void)clock_gettime(CLOCK_MONOTONIC, &g_lastTime);
for (uint32 i = 0; i < g_dynamic_header->relationCount; i++) {
if (g_instance_role_group_ptr[i].instanceMember[0].instanceType == INSTANCE_TYPE_COORDINATE) {
g_cnCount++;
}
}
g_cnMajorityNum = HALF_COUNT(g_cnCount) + 1;
}
/*
 * Raise the CN majority threshold by one after more alive CNs were seen
 * than the current threshold, capped at the true majority (cnCount/2)+1.
 */
static inline void IncreaseCnMajorityNum(void)
{
    /* maximum number of cnMajorityNum is (cnCount/2)+1 */
    if (g_cnMajorityNum < (HALF_COUNT(g_cnCount) + 1)) {
        g_cnMajorityNum++;
        write_runlog(LOG, "[IncreaseCnMajorityNum] cn barrier majority num = %u\n", g_cnMajorityNum);
    }
}
/*
 * Lower the CN majority threshold by one (floor of 1) once the alive-CN
 * count has stayed below the threshold for a full minute, so the barrier
 * can still advance in a degraded cluster instead of stalling forever.
 */
static void DecreaseCnMajorityNum(void)
{
    const long oneMinute = 60;
    (void)clock_gettime(CLOCK_MONOTONIC, &g_curTime);
    /* seconds elapsed since the majority check last succeeded (g_lastTime) */
    long cnMajFailedTime = (g_curTime.tv_sec - g_lastTime.tv_sec);
    if (cnMajFailedTime >= oneMinute) {
        /* Minimum number of cnMajorityNum is 1 */
        if (g_cnMajorityNum > 1) {
            g_cnMajorityNum--;
            write_runlog(LOG, "[DecreaseCnMajorityNum] cn barrier majority num = %u\n", g_cnMajorityNum);
        }
        (void)clock_gettime(CLOCK_MONOTONIC, &g_lastTime);
    }
}
/*
 * Check whether the number of CNs holding the barrier reaches the adaptive
 * majority threshold.  Side effects: on success the threshold may grow back
 * toward a true majority and the failure timer is reset; on failure the
 * threshold may shrink after a sustained one-minute shortfall.
 */
static bool IsCnMajority(uint32 barrierExistCnCount)
{
    if (barrierExistCnCount < g_cnMajorityNum) {
        DecreaseCnMajorityNum();
        return false;
    }
    if (barrierExistCnCount > g_cnMajorityNum) {
        IncreaseCnMajorityNum();
    }
    (void)clock_gettime(CLOCK_MONOTONIC, &g_lastTime);
    return true;
}
/*
 * Check whether enough DN replicas in one shard hold the barrier.
 * With exactly two replicas a single surviving DN is accepted; otherwise
 * a strict majority of g_dn_replication_num is required.
 */
static bool IsDnMajority(uint32 barrierExistDnCount)
{
    const uint32 dualReplica = 2;
    return (g_dn_replication_num == dualReplica)
        ? (barrierExistDnCount >= 1)
        : (IS_MAJORITY(g_dn_replication_num, barrierExistDnCount));
}
/*
 * Persist the computed minimum barrier under /<user>/barrier/query_barrier
 * in the DDB key/value store.  Returns the store's status; failures are
 * logged here and handled by the caller.
 */
static status_t RefreshQueryBarrierToDdb(char *minBarrier, uint32 barrierLen)
{
    char key[MAX_PATH_LEN] = {0};
    errno_t rc = snprintf_s(key, MAX_PATH_LEN, MAX_PATH_LEN - 1, "/%s/barrier/query_barrier", pw->pw_name);
    securec_check_intval(rc, (void)rc);
    status_t st = SetKV2Ddb(key, MAX_PATH_LEN, minBarrier, barrierLen, NULL);
    if (st != CM_SUCCESS) {
        write_runlog(ERROR, "[RefreshQueryBarrierToDdb] ddb set failed. key=%s,value=%s.\n", key, minBarrier);
    }
    return st;
}
/*
 * Write the new query barrier to the DDB and, only after that write
 * succeeds, mirror it into the in-memory copy g_queryBarrier so memory
 * and store never diverge.
 */
static void GlobalQueryBarrierRefresh(char *minBarrier, uint32 barrierLen)
{
    status_t st = RefreshQueryBarrierToDdb(minBarrier, barrierLen);
    if (st != CM_SUCCESS) {
        write_runlog(ERROR, "Refresh query barrier failed, value is %s\n", minBarrier);
        return;
    }
    errno_t rc = memcpy_s(g_queryBarrier, barrierLen - 1, minBarrier, barrierLen - 1);
    securec_check_errno(rc, (void)rc);
    write_runlog(LOG, "Refresh query barrier success, value is %s\n", g_queryBarrier);
}
/*
 * Promote a (previously agreed) query barrier to be the new target barrier
 * by copying it into g_targetBarrier.  Pure in-memory update; the DDB is
 * not touched here.
 */
static inline void GlobalTargetBarrierRefresh(const char *queryBarrier, uint32 barrierLen)
{
    errno_t rc;
    /* set target value */
    rc = memcpy_s(g_targetBarrier, barrierLen - 1, queryBarrier, barrierLen - 1);
    securec_check_errno(rc, (void)rc);
    write_runlog(LOG, "set target barrier value is %s\n", g_targetBarrier);
}
/*
 * Fold one instance's barrier ID into the running minimum: adopt the
 * instance value when no minimum has been recorded yet, or when it sorts
 * lexicographically lower than the current minimum.
 */
static void GetMinBarrierID(char *minBarrier, const char* instanceBarrierID, uint32 barrierLen, uint32 instanceId)
{
    const bool takeInstanceValue = (strlen(minBarrier) == 0) ||
        (strncmp(instanceBarrierID, minBarrier, barrierLen - 1) < 0);
    if (takeInstanceValue) {
        errno_t rc = memcpy_s(minBarrier, barrierLen - 1, instanceBarrierID, barrierLen - 1);
        securec_check_intval(rc, (void)rc);
    }
    write_runlog(LOG, "GetMinBarrierID instanceId:%u minBarrierID:%s, instanceBarrierID:%s\n",
        instanceId, minBarrier, instanceBarrierID);
}
/*
 * Scan every alive CN and DN replica in the cluster and compute the
 * minimum barrier ID across them into minBarrier.  Works on a local
 * buffer and copies the result out at the end.
 */
static void CalcMinBarrier(char *minBarrier, uint32 barrierLen)
{
    char tmpMinBarrier[BARRIERLEN] = {0};
    for (uint32 i = 0; i < g_dynamic_header->relationCount; i++) {
        cm_instance_report_status *instanceStatus = &g_instance_group_report_status_ptr[i].instance_status;
        /* compute CN nodes */
        if (g_instance_role_group_ptr[i].instanceMember[0].instanceType == INSTANCE_TYPE_COORDINATE &&
            IsRoleCnAlive(instanceStatus)) {
            /* compute and get the min global barrier */
            GetMinBarrierID(tmpMinBarrier, instanceStatus->coordinatemember.barrierID, barrierLen,
                g_instance_role_group_ptr[i].instanceMember[0].instanceId);
        }
        /* compute DN nodes */
        if (g_instance_role_group_ptr[i].instanceMember[0].instanceType != INSTANCE_TYPE_DATANODE) {
            continue;
        }
        /* every alive replica of the DN shard contributes its barrier ID */
        for (int j = 0; j < g_instance_role_group_ptr[i].count; j++) {
            if (!IsRoleDnAlive(&instanceStatus->data_node_member[j].local_status)) {
                continue;
            }
            GetMinBarrierID(tmpMinBarrier, instanceStatus->data_node_member[j].barrierID, barrierLen,
                g_instance_role_group_ptr[i].instanceMember[j].instanceId);
        }
    }
    errno_t rc = memcpy_s(minBarrier, BARRIERLEN - 1, tmpMinBarrier, BARRIERLEN - 1);
    securec_check_intval(rc, (void)rc);
}
/*
 * Decide whether the target barrier may be advanced: every DN shard must
 * individually reach a DN majority of replicas that report the barrier as
 * existing, and cluster-wide the alive CNs reporting it must reach the
 * adaptive CN majority.  Note that IsCnMajority/DecreaseCnMajorityNum have
 * side effects on the threshold and its timer.
 */
static bool IsNeedUpdateTargetBarrier()
{
    uint32 barrierExistCnCount = 0;
    for (uint32 i = 0; i < g_dynamic_header->relationCount; i++) {
        cm_instance_report_status *instanceStatus = &g_instance_group_report_status_ptr[i].instance_status;
        /* compute CN nodes */
        if (g_instance_role_group_ptr[i].instanceMember[0].instanceType == INSTANCE_TYPE_COORDINATE &&
            IsRoleCnAlive(instanceStatus)) {
            /* all tested value is exists */
            if (g_instance_group_report_status_ptr[i].instance_status.coordinatemember.is_barrier_exist) {
                barrierExistCnCount++;
            }
        }
        /* compute DN nodes */
        if (g_instance_role_group_ptr[i].instanceMember[0].instanceType != INSTANCE_TYPE_DATANODE) {
            continue;
        }
        /* count alive replicas of this shard that report the barrier */
        uint32 barrierExistDnCount = 0;
        for (int j = 0; j < g_instance_role_group_ptr[i].count; j++) {
            if (!IsRoleDnAlive(&instanceStatus->data_node_member[j].local_status)) {
                continue;
            }
            /* all tested value is exists */
            if (instanceStatus->data_node_member[j].is_barrier_exist) {
                barrierExistDnCount++;
            }
        }
        /* one shard failing its majority vetoes the whole update */
        if (!IsDnMajority(barrierExistDnCount)) {
            write_runlog(LOG, "[IsNeedUpdateTargetBarrier] barrierExistDnCount=%u\n", barrierExistDnCount);
            return false;
        }
    }
    if (!IsCnMajority(barrierExistCnCount)) {
        write_runlog(LOG, "[IsNeedUpdateTargetBarrier] barrierExistCnCount=%u\n", barrierExistCnCount);
        return false;
    }
    return true;
}
/*
 * Decide whether the query barrier may be advanced to minBarrier.
 * Conditions: the barrier may only move forward (never shrink); a first
 * update (empty queryBarrier) is always allowed; otherwise every alive
 * CN and DN must already acknowledge the current queryBarrier, and at
 * least one alive instance must exist.
 */
static bool IsNeedUpdateQueryBarrier(const char *minBarrier, const char *queryBarrier, uint32 barrierLen)
{
    /* minBarrierID can not smaller than queryBarrierID, should keeping barrier's increasement. */
    if (strncmp(minBarrier, queryBarrier, barrierLen - 1) < 0) {
        write_runlog(LOG, "[IsNeedUpdateQueryBarrier] minBarrier is smaller than queryBarrierID\n");
        return false;
    }
    /* first update query barrier */
    if (strlen(queryBarrier) == 0) {
        write_runlog(LOG, "[IsNeedUpdateQueryBarrier] first update query barrier\n");
        return true;
    }
    /* number of alive instances that were checked below */
    int count = 0;
    for (uint32 i = 0; i < g_dynamic_header->relationCount; i++) {
        /* compute CN nodes */
        if (g_instance_role_group_ptr[i].instanceMember[0].instanceType == INSTANCE_TYPE_COORDINATE &&
            IsRoleCnAlive(&g_instance_group_report_status_ptr[i].instance_status)) {
            count++;
            if (strncmp(g_instance_group_report_status_ptr[i].instance_status.coordinatemember.query_barrierId,
                queryBarrier, barrierLen - 1) != 0) {
                return false;
            }
        }
        /* compute DN nodes */
        if (g_instance_role_group_ptr[i].instanceMember[0].instanceType != INSTANCE_TYPE_DATANODE) {
            continue;
        }
        cm_instance_report_status *instanceStatus = &g_instance_group_report_status_ptr[i].instance_status;
        for (int j = 0; j < g_instance_role_group_ptr[i].count; j++) {
            if (!IsRoleDnAlive(&instanceStatus->data_node_member[j].local_status)) {
                continue;
            }
            count++;
            /* compare to the etcd value, not same, so no need to update test value */
            if (strncmp(instanceStatus->data_node_member[j].query_barrierId, queryBarrier, barrierLen - 1) != 0) {
                return false;
            }
        }
    }
    if (count == 0) {
        write_runlog(ERROR, "[IsNeedUpdateQueryBarrier] available instance in update query barrier is 0\n");
        return false;
    }
    return true;
}
/*
 * Persist the current target barrier under /<user>/barrier/stop_barrier in
 * the DDB.  Called during a cluster failover so standby clusters know where
 * replay must stop.  Returns the store status.
 */
static status_t GenerateStopBarrier()
{
    char key[MAX_PATH_LEN] = {0};
    errno_t rc = snprintf_s(key, MAX_PATH_LEN, MAX_PATH_LEN - 1, "/%s/barrier/stop_barrier", pw->pw_name);
    securec_check_intval(rc, (void)rc);
    status_t st = SetKV2Ddb(key, MAX_PATH_LEN, g_targetBarrier, BARRIERLEN, NULL);
    if (st != CM_SUCCESS) {
        write_runlog(ERROR, "[GenerateStopBarrier] ddb set failed. key=%s,value=%s.\n", key, g_targetBarrier);
    }
    return st;
}
/*
 * Read /<user>/barrier/query_barrier from the DDB into `value` (zeroed
 * first).  A missing key is not treated as an error: only a failure other
 * than CAN_NOT_FIND_THE_KEY is propagated.  On success the value is also
 * mirrored into g_queryBarrier so memory matches the store.
 */
static status_t GetQueryBarrierValueFromDDb(char *value, uint32 len)
{
    errno_t rc = memset_s(value, len, 0, len);
    securec_check_errno(rc, (void)rc);
    char queryBarrierKey[MAX_PATH_LEN] = {0};
    rc = snprintf_s(queryBarrierKey, MAX_PATH_LEN, MAX_PATH_LEN - 1, "/%s/barrier/query_barrier", pw->pw_name);
    securec_check_intval(rc, (void)rc);
    DDB_RESULT dbResult = SUCCESS_GET_VALUE;
    status_t st = GetKVFromDDb(queryBarrierKey, MAX_PATH_LEN, value, len, &dbResult);
    if (st != CM_SUCCESS && dbResult != CAN_NOT_FIND_THE_KEY) {
        write_runlog(ERROR, "get query_barrier info %s from ddb: %d\n", queryBarrierKey, (int)dbResult);
        return st;
    }
    /* Ensure that the value of querybarrier in etcd is the same as that in cma */
    rc = memcpy_s(g_queryBarrier, sizeof(g_queryBarrier), value, len);
    securec_check_errno(rc, (void)rc);
    return CM_SUCCESS;
}
/*
 * Thread entry: maintain the global query/target barriers while this CM
 * server is primary.  Each second it reads the stored query barrier,
 * computes the cluster-wide minimum barrier, advances the query barrier
 * when all alive instances agree, and promotes the old query barrier to
 * the target barrier once a majority reports it.  During a cluster
 * failover it persists a stop barrier and exits.
 *
 * Fix: the needUpdateQueryVal/needUpdateTargetVal flags were declared
 * outside the loop and needUpdateTargetVal was only assigned when
 * needUpdateQueryVal was true, so a stale `true` from an earlier
 * iteration could refresh the target barrier again without re-checking
 * the majority.  Both flags are now reset every iteration.
 */
void *DealGlobalBarrier(void *arg)
{
    char minBarrier[BARRIERLEN] = {0};
    char queryBarrier[BARRIERLEN] = {0};
    write_runlog(LOG, "Starting DealGlobalBarrier thread.\n");
    InitCnMajorityNum();
    for (;;) {
        if (g_HA_status->local_role != CM_SERVER_PRIMARY) {
            cm_sleep(20);
            continue;
        }
        /* per-iteration decisions; must not leak across iterations */
        bool needUpdateQueryVal = false;
        bool needUpdateTargetVal = false;
        /* get the old query value from DDb */
        status_t st = GetQueryBarrierValueFromDDb(queryBarrier, BARRIERLEN);
        if (st != CM_SUCCESS) {
            write_runlog(LOG, "Get query_barrier From DDb failed\n");
            cm_sleep(1);
            continue;
        }
        /* scan all nodes to get the minBarrierID */
        CalcMinBarrier(minBarrier, BARRIERLEN);
        /* scan all nodes and check it's querybarrier */
        needUpdateQueryVal = IsNeedUpdateQueryBarrier(minBarrier, queryBarrier, BARRIERLEN);
        if (needUpdateQueryVal) {
            GlobalQueryBarrierRefresh(minBarrier, BARRIERLEN);
            needUpdateTargetVal = IsNeedUpdateTargetBarrier();
        }
        if (needUpdateTargetVal) {
            /* the previous query barrier becomes the new target barrier */
            GlobalTargetBarrierRefresh(queryBarrier, BARRIERLEN);
        }
        write_runlog(LOG, "get queryBarrier is %s, minBarrier is %s, needUpdateQueryVal: %d, needUpdateTargetVal: %d\n",
            queryBarrier, minBarrier, needUpdateQueryVal, needUpdateTargetVal);
        /* Generate stop_barrier when cluster failover */
        bool isInClusterFailover = false;
        bool isExistClusterMaintenance = ExistClusterMaintenance(&isInClusterFailover);
        if (isExistClusterMaintenance && isInClusterFailover) {
            st = GenerateStopBarrier();
            if (st == CM_SUCCESS) {
                write_runlog(LOG, "Generate Stop Barrier success, stop_barrier is %s\n", g_targetBarrier);
                return NULL;
            }
        }
        cm_sleep(1);
    }
}
|
mbsharp85/knox | gateway-spi/src/main/java/org/apache/knox/gateway/services/config/client/RemoteConfigurationRegistryClient.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.knox.gateway.services.config.client;
import java.util.List;
/**
 * Client abstraction over a remote configuration registry (e.g. ZooKeeper):
 * a hierarchy of path-addressed entries with string/binary data, per-entry
 * ACLs, and change notification via listeners.
 */
public interface RemoteConfigurationRegistryClient {
  /** Address of the remote registry this client talks to. */
  String getAddress();
  /** Whether authentication has been configured for this client. */
  boolean isAuthenticationConfigured();
  /** Whether an entry exists at the given path. */
  boolean entryExists(String path);
  /** ACL entries currently applied at the given path. */
  List<EntryACL> getACL(String path);
  /** Replace the ACL entries at the given path. */
  void setACL(String path, List<EntryACL> acls);
  /** Names of the direct children of the entry at the given path. */
  List<String> listChildEntries(String path);
  /** Entry data decoded with the client's default encoding. */
  String getEntryData(String path);
  /** Entry data decoded with the given character encoding. */
  String getEntryData(String path, String encoding);
  /** Create an entry with no data. */
  void createEntry(String path);
  /** Create an entry with the given data (default encoding). */
  void createEntry(String path, String data);
  /** Create an entry with the given data and character encoding. */
  void createEntry(String path, String data, String encoding);
  /** Set entry data; the int return value's meaning is implementation-defined. */
  int setEntryData(String path, String data);
  /** Set entry data with the given encoding; return as above. */
  int setEntryData(String path, String data, String encoding);
  /** Delete the entry at the given path. */
  void deleteEntry(String path);
  /** Register a listener for child add/remove/update events under path. */
  void addChildEntryListener(String path, ChildEntryListener listener) throws Exception;
  /** Register a listener for data changes of the entry at path. */
  void addEntryListener(String path, EntryListener listener) throws Exception;
  /** Remove any listener previously registered for path. */
  void removeEntryListener(String path) throws Exception;
  /** Callback interface for child-entry lifecycle events. */
  interface ChildEntryListener {
    enum Type {
      ADDED,
      REMOVED,
      UPDATED
    }
    void childEvent(RemoteConfigurationRegistryClient client, ChildEntryListener.Type type, String path);
  }
  /** Callback interface for entry data changes. */
  interface EntryListener {
    void entryChanged(RemoteConfigurationRegistryClient client, String path, byte[] data);
  }
  /** One access-control entry: an identity, its scheme/type, and permissions. */
  interface EntryACL {
    String getId();
    String getType();
    Object getPermissions();
    boolean canRead();
    boolean canWrite();
  }
}
|
sifbuilder/eon | eon-muon-anitem.js | <filename>eon-muon-anitem.js
/***********
* @eonMuonAnitem
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports)
: typeof define === 'function' && define.amd ? define(['exports'], factory)
: (factory((global.eonMuonAnitem = global.eonMuonAnitem || {})))
}(this, function (exports) {
'use strict'
async function eonitem (__eo = {}) {
let [
eonMuonSnap,
] = await Promise.all([
__eo('xs').b('eon-muon-snap'),
])
// https://stackoverflow.com/questions/728360/how-do-i-correctly-clone-a-javascript-object
const clone = obj => { // props.o
if (obj == null || typeof obj !== 'object') return obj
let copy = obj.constructor()
for (let attr in obj) {
if (obj.hasOwnProperty(attr)) copy[attr] = obj[attr]
}
return copy
}
const functor = (d, ...p) => (typeof d === 'function') ? d(...p) : d
// ............................. snapani
function snapaniDyn (ani, t) {
let r = Promise.resolve()
if (ani !== undefined) {
t = t || ani.eotim.unTime
r = Promise.resolve(eonMuonSnap.snap(ani, t))
}
return r
}
function snapani (ani, t) {
let r
if (ani !== undefined) {
t = t || ani.eotim.unTime
r = eonMuonSnap.snap(ani, t)
}
return r
}
// ............................. functorize
let functorize = function (anitem, t) {
let newAnitem = clone(anitem)
console.assert(anitem !== undefined)
if (newAnitem.eoload === undefined) newAnitem.eoload = {}
if (newAnitem.eomot === undefined) newAnitem.eomot = {}
let eomot = anitem.eomot || {}
if (eomot.ereform !== undefined) {
let ereform = functor(eomot.ereform, anitem) // ereform
newAnitem.eomot.ereform = ereform
} else if (eomot.conform !== undefined) {
let conform = functor(eomot.conform, anitem) // conform
newAnitem.eomot.conform = conform
} else if (eomot.proform !== undefined) {
let proform = functor(eomot.proform, anitem) // proform
newAnitem.eomot.proform = proform
}
if (anitem.eofold !== undefined) {
newAnitem.eofold = functor(anitem.eofold, anitem) // eofold
}
return newAnitem
}
// ............................. functorgeofold
let functorgeofold = function (anitem, t) {
let newAnitem = clone(anitem)
console.assert(anitem !== undefined)
console.assert(anitem.eofold !== undefined, anitem.eoric.uid + ' eofold undefined')
let eofold = functor((anitem.eofold), anitem) // eofold
newAnitem.eofold = eofold
return newAnitem
}
// ............................. functorpayload
let functorpayload = function (anitem, t) {
let newAnitem = clone(anitem)
console.assert(anitem !== undefined)
console.assert(anitem.eoload !== undefined, anitem.eoric.uid + ' eoload undefined')
if (anitem.eomot.ereform !== undefined) {
let ereform = functor(anitem.eomot.ereform, anitem) // ereform
newAnitem.eomot.ereform = ereform
} else if (anitem.eomot.conform !== undefined) {
let conform = functor(anitem.eomot.conform, anitem) // conform
newAnitem.eomot.conform = conform
} else if (anitem.eomot.proform !== undefined) {
let proform = functor(anitem.eomot.proform, anitem) // proform
newAnitem.eomot.proform = proform
}
return newAnitem
}
// ............................. enty
let enty = () => {}
enty.snapani = snapani
enty.functorize = functorize
enty.functorgeofold = functorgeofold
enty.functorpayload = functorpayload
return enty
}
exports.eonMuonAnitem = eonitem
}))
|
amane312/mct | mobile/screens/AI/Quiz.js | import React, {useState} from 'react';
import {
Text,
View,
StyleSheet,
TouchableOpacity,
SafeAreaView,
FlatList,
StatusBar,
Image,
ImageBackground,
Button,
ScrollView,
} from 'react-native';
import {ProgressBar} from '@react-native-community/progress-bar-android';
import {Icon} from 'react-native-elements';
import Questions from '../../data/questions';
import Answers from '../../data/answers';
// 50-question personality quiz screen.  Shows one multiple-choice question
// at a time with a progress bar; after the last answer the "next" button is
// enabled and switches to a static (hard-coded) result screen.
const Quiz = (props) => {
  // q_no: index of the current question (0-based)
  const [q_no, setq_no] = useState(0);
  // quizScreen: true => question view, false => result view
  const [quizScreen, setQuizScreen] = useState(true);
  // getDisabled: next-button disabled until all 50 questions are answered
  const [getDisabled, setDislabled] = useState(true);
  // ansArray: selected answer labels, in question order
  const [ansArray, setAnsArray] = useState([]);
  const renderItem = ({item}) => (
    <Item label={item.label} name={item.name} option={item.option} />
  );
  // Record the chosen label and advance; on the last question only enable
  // the next button.
  // NOTE(review): pressing an option again on the last question appends
  // another label to ansArray without advancing — confirm this is intended.
  const mcqSelect = (label) => {
    if (q_no + 1 >= 50) {
      setDislabled(false);
    } else {
      setq_no(q_no + 1);
    }
    setAnsArray((prev) => [...prev, label]);
  };
  const Item = ({option, name, label}) => (
    <TouchableOpacity onPress={() => mcqSelect(label)} style={styles.answers}>
      <Text style={styles.answerText}>{name + ') ' + option}</Text>
    </TouchableOpacity>
  );
  return (
    <>
      {quizScreen ? (
        <View style={styles.screen}>
          <View style={styles.header}>
            <Text style={{paddingBottom: 5}}>Асуулт: {q_no + 1}/50</Text>
            <ProgressBar
              styleAttr="Horizontal"
              indeterminate={false}
              progress={(q_no + 1) / 50}
            />
          </View>
          <View style={styles.mcqContainer}>
            <View style={styles.question}>
              <Text style={styles.questionText}>{Questions[q_no]}</Text>
            </View>
            <SafeAreaView style={styles.container}>
              <FlatList
                data={Answers}
                renderItem={renderItem}
                keyExtractor={(item) => item.name}
              />
            </SafeAreaView>
          </View>
          <View style={styles.buttonContainer}>
            <Icon
              disabled={getDisabled}
              reverse
              name={'navigate-next'}
              type="material"
              color="#64e764"
              onPress={() => setQuizScreen(false)}
            />
          </View>
        </View>
      ) : (
        <View style={styles.resultContainer}>
          <StatusBar barStyle="light-content" />
          <View style={styles.resultHeader}>
            <ImageBackground
              source={require('../../assets/header.png')}
              style={styles.imageBackground}>
              <ScrollView style={styles.scrollView}>
                <Image
                  style={{width: 220, height: 140}}
                  source={require('../../assets/badge.png')}
                />
                <Text
                  style={{color: '#777777', fontWeight: 'bold', fontSize: 30}}>
                  {' '}Та бол{' '}
                  <Text style={{color: '#327a34'}}>Тэнүүлч</Text>
                </Text>
                <Text style={{color: 'red'}}>
                  Та амьдрал хаана ч хөтөлж, дасан зохицоход хурдан байдаг.
                </Text>
                <View
                  style={{
                    backgroundColor: '#cbdadd',
                    padding: 5,
                    margin: 5,
                    borderRadius: 5,
                    elevation: 5,
                  }}>
                  <Image
                    style={{width: 380, height: 200}}
                    source={require('../../assets/213.jpg')}
                  />
                  <Text
                    style={{
                      color: '#eab649',
                      fontWeight: 'bold',
                      fontSize: 30,
                    }}>
                    Нээлттэй байдал
                  </Text>
                  <Text
                    style={{
                      color: '#eab649',
                      fontSize: 15,
                    }}>
                    Та амьдрал таныг хаана ч хөтөлж байгаа тэр газарт хурдан
                    дасан зохицох чадвартай. Нээлттэй байх нь таны шинэ, ер
                    бусын туршлагыг хүлээн авах хүсэл эрмэлзлийг илэрхийлдэг.
                  </Text>
                  <Text
                    style={{
                      fontSize: 12,
                    }}>
                    Таны илэн далангүй байдал нь уламжлал, танил байдлаас хүчээ
                    авч байгааг харуулж байна. Та дэлхий дээр бууж байгаа бөгөөд
                    хийсвэр эсвэл онолын сэтгэлгээнээс илүү тодорхой, шулуун
                    чиглэлийг илүүд үздэг.
                  </Text>
                  <Text
                    style={{
                      color: '#e76c42',
                      fontWeight: 'bold',
                      fontSize: 30,
                    }}>
                    Ухамсартай байдал
                  </Text>
                  <Text
                    style={{
                      color: '#e76c42',
                      fontSize: 15,
                    }}>
                    Мэдрэмж гэдэг нь бидний импульсийг хэрхэн удирдаж, түүнд
                    нийцүүлэн ажиллах тухай юм.
                  </Text>
                  <Text
                    style={{
                      fontSize: 12,
                    }}>
                    Та өдөөлтөөр шууд үйлдэл хийх дуртай бөгөөд энэ нь үр дүнтэй
                    бөгөөд хөгжилтэй байж болно. Гэхдээ хэт туйлшрах юм бол энэ
                    нь өөртөө болон бусдад урт хугацааны хүсээгүй үр дагаварт
                    хүргэж болзошгүй юм.
                  </Text>
                  <Text
                    style={{
                      color: '#d85068',
                      fontWeight: 'bold',
                      fontSize: 30,
                    }}>
                    Нэмэлт чадамж
                  </Text>
                  <Text
                    style={{
                      color: '#d85068',
                      fontSize: 15,
                    }}>
                    Таны нэмэлт чадамж нь гадаад ертөнцтэй хэр эрч хүчтэй
                    харьцаж байгааг илтгэнэ.
                  </Text>
                  <Text
                    style={{
                      fontSize: 12,
                    }}>
                    Таны гаднах хандлага өндөр байгаа нь таныг нийгмийн нөхцөл
                    байдал, үйл ажиллагаанаас эерэг сэтгэл хөдлөлийг мэдэрч
                    байгааг илтгэж байгаа тул тэдгээрийг хайж, өсөн дэвжих
                    магадлалтай.
                  </Text>
                  <Text
                    style={{
                      color: '#a8b64b',
                      fontWeight: 'bold',
                      fontSize: 30,
                    }}>
                    Харилцан ойлголцох
                  </Text>
                  <Text
                    style={{
                      color: '#a8b64b',
                      fontSize: 15,
                    }}>
                    Харилцан ойлголцох байдал нь нийгмийн бүлгийн аз жаргалын
                    төлөө санаа зовж байгааг илтгэнэ.
                  </Text>
                  <Text
                    style={{
                      fontSize: 12,
                    }}>
                    Та эргэлзэх эрүүл мэдрэмжтэй, хүчтэй шийдэмгий байдаг. Хүн
                    болгонд таалагдахгүй ч хийх шаардлагатай хатуу шийдвэрүүдийг
                    гаргахаас та цөхрөнгөө бардаггүй.
                  </Text>
                  <Text
                    style={{
                      color: '#55a3d3',
                      fontWeight: 'bold',
                      fontSize: 30,
                    }}>
                    Сэтгэл хөдлөлийн тогтвортой байдал
                  </Text>
                  <Text
                    style={{
                      color: '#55a3d3',
                      fontSize: 15,
                    }}>
                    Сэтгэл хөдлөлийн тогтвортой байдал нь үйл явдалд сэтгэл
                    хөдлөлөөр хандах хандлагыг харуулдаг.
                  </Text>
                  <Text
                    style={{
                      fontSize: 12,
                    }}>
                    Та сэтгэл хөдлөлийн хувьд идэвхтэй бөгөөд үйл явдалд хэсэг
                    хугацаанд сунжирсан мэдрэмжээр эрчимтэй хариу үйлдэл үзүүлэх
                    хандлагатай байдаг. Энэ нь таны тодорхой сэтгэх, өөрчлөлт,
                    стрессийг даван туулах чадварт нөлөөлж болно.
                  </Text>
                  <Text
                    style={{
                      color: '#327a34',
                      fontSize: 20,
                    }}>
                    Тэнүүлчний 10 шинж чанарыг энд оруулав:
                  </Text>
                  <Text
                    style={{
                      fontSize: 15,
                    }}>
                    1.Та ажлаа байнга сольдог (эсвэл харилцаа){'\n'}
                    2. Та шинэ газар руу байнга нүүж эсвэл аялдаг{'\n'}
                    3.Та тусгаар тогтнолоо чанга атгадаг {'\n'}
                    4.Та хурдан уйдаж эсвэл тайван бус болж, дараагийн "шинэ"
                    туршлага эсвэл адал явдлыг хүсэх болно{'\n'}
                    5.Та нийцлийг эсэргүүцэж, ганцаараа зогсоход бэлэн байна{' '}
                    {'\n'}
                    6.Та өөрчлөлтөд цэцэглэн хөгжиж , зогсонги байдалд санаа
                    зовдог
                    {'\n'}
                    7.Та шинэ байршил эсвэл нөхцөл байдалд шилжих логик шалтгаан
                    үргэлж хэрэггүй {'\n'}
                    8.Та "урсгалын дагуу явах"-ыг сайн хийдэг.
                    {'\n'}
                    9.Та тодорхой бус байдалд илүү таатай байдаг бөгөөд
                    амьдралын томоохон асуултуудад үргэлж хариулт шаарддаггүй.{' '}
                    {'\n'}
                    10.Та өөрийн амьдарч байсан янз бүрийн газруудад найзуудын
                    бүлгүүдтэй байх хандлагатай байдаг
                  </Text>
                </View>
                <View>
                  <Button title="Хадгалах" color="#64e764" />
                  <Button
                    onPress={() =>
                      props.navigation.navigate('Home Screen', {
                        ansArr: ansArray,
                      })
                    }
                    title="Эхлэлрүү буцах"
                    color="#64e764"
                  />
                </View>
              </ScrollView>
            </ImageBackground>
          </View>
        </View>
      )}
    </>
  );
};
// Static styles for the quiz and result screens.
// NOTE(review): styles.scrollView is referenced in the result view but is
// not defined here — confirm whether it should be added or the reference
// removed.
const styles = StyleSheet.create({
  // answer list wrapper on the question screen
  container: {
    width: '100%',
    height: '100%',
    marginLeft: 40,
  },
  // full-screen column layout of the question view
  screen: {
    width: '100%',
    height: '100%',
    flexDirection: 'column',
  },
  header: {
    flex: 1,
    padding: 15,
    marginTop: 0,
  },
  mcqContainer: {
    flex: 7,
    alignItems: 'center',
  },
  question: {
    width: '80%',
    paddingBottom: 20,
  },
  // one tappable answer row
  answers: {
    width: '80%',
    padding: 15,
    backgroundColor: '#9fc5e8',
    marginLeft: 17,
    marginVertical: 10,
    borderRadius: 30,
  },
  questionText: {
    fontWeight: 'bold',
    fontSize: 20,
    color: '#777',
  },
  answerText: {
    color: 'white',
    fontWeight: 'bold',
    fontSize: 17,
  },
  // "next" icon button anchored bottom-right
  buttonContainer: {
    flex: 0.1,
    alignItems: 'center',
    justifyContent: 'flex-end',
    marginBottom: 30,
    marginLeft: '65%',
  },
  // result screen containers
  resultContainer: {
    flex: 1,
    backgroundColor: 'white',
    justifyContent: 'center',
  },
  resultHeader: {
    flex: 1,
  },
  imageBackground: {
    justifyContent: 'center',
    alignItems: 'center',
    width: '100%',
    height: '100%',
  },
  resultFooter: {
    flex: 1,
    padding: 20,
  },
  resultBtnContainer: {
    paddingVertical: 10,
    borderRadius: 100,
    alignItems: 'center',
    justifyContent: 'center',
  },
});
export default Quiz;
|
YXChan/chronus | chronus-metadata-api/src/main/java/com/qihoo/finance/chronus/metadata/api/assign/enums/ExecutorLoadPhaseEnum.java | <reponame>YXChan/chronus
package com.qihoo.finance.chronus.metadata.api.assign.enums;
/**
* Created by xiongpu on 2019/9/7.
*/
/**
 * Lifecycle phases of an executor's task-load state during scheduling
 * reassignment.  Each constant pairs a numeric phase code (persisted in
 * metadata) with a human-readable description.
 *
 * <p>The static {@code isXxxPhase} helpers are null-safe: a {@code null}
 * phase never matches.  They all delegate to {@link #isEquals(Integer)}
 * instead of repeating the comparison (previous code duplicated the same
 * null-check-and-compare six times).
 */
public enum ExecutorLoadPhaseEnum {
    RESET(-1, "需重新加载"),
    INIT(0, "初始化"),
    REMOVE(1, "变更调度需移除"),
    ADD(2, "变更调度需补充"),
    FINISH(3, "处理完成"),
    OFFLINE(-9, "节点下线"),
    ;
    /** Numeric phase code stored in metadata. */
    private final Integer phase;
    /** Human-readable description of the phase. */
    private final String desc;

    ExecutorLoadPhaseEnum(Integer phase, String desc) {
        this.phase = phase;
        this.desc = desc;
    }

    public Integer getPhase() {
        return phase;
    }

    public String getDesc() {
        return desc;
    }

    public static boolean isResetPhase(Integer phase) {
        return RESET.isEquals(phase);
    }

    public static boolean isRemovePhase(Integer phase) {
        return REMOVE.isEquals(phase);
    }

    public static boolean isInitPhase(Integer phase) {
        return INIT.isEquals(phase);
    }

    public static boolean isAddPhase(Integer phase) {
        return ADD.isEquals(phase);
    }

    public static boolean isFinishPhase(Integer phase) {
        return FINISH.isEquals(phase);
    }

    public static boolean isOfflinePhase(Integer phase) {
        return OFFLINE.isEquals(phase);
    }

    /** Null-safe comparison of this constant's code with the given phase. */
    public boolean isEquals(Integer phase) {
        return phase != null && this.getPhase().intValue() == phase.intValue();
    }
}
|
chuckmersereau/api_practice | spec/services/tnt_import/xml_reader_spec.rb | <filename>spec/services/tnt_import/xml_reader_spec.rb
require 'rails_helper'
# Specs for TntImport::XmlReader: construction and tolerant XML parsing.
describe TntImport::Xml do
  # Import record with override enabled (default fixture file).
  let(:tnt_import) { create(:tnt_import, override: true) }
  let(:xml_reader) { TntImport::XmlReader.new(tnt_import) }
  describe 'initialize' do
    it 'initializes' do
      expect(xml_reader).to be_a TntImport::XmlReader
    end
  end
  describe '#parsed_xml' do
    context 'unparsable characters' do
      # Fixture deliberately contains the UTF-8 sequences the reader strips.
      let(:test_file_path) { Rails.root.join('spec/fixtures/tnt/tnt_unparsable_characters.xml') }
      let(:tnt_import) { create(:tnt_import, override: true, file: File.new(test_file_path)) }
      it 'verify that the test file has unparsable characters' do
        # Guard test: keeps the fixture honest so the next test stays meaningful.
        contents = File.open(test_file_path).read
        expect(TntImport::XmlReader::UNPARSABLE_UTF8_CHARACTERS).to be_present
        expect(TntImport::XmlReader::UNPARSABLE_UTF8_CHARACTERS.all? do |unparsable_utf8_character|
          contents.include?(unparsable_utf8_character)
        end).to eq(true)
      end
      it 'handles unparsable utf8 characters' do
        # If the xml is not parsed properly we expect the number of returned tables to be less than 21
        expect(xml_reader.parsed_xml.tables.keys.size).to eq(21)
      end
    end
  end
end
|
doitintl/dataflow-bigquery-schema-migrator-insert | src/main/java/com/doit/schemamigration/Parsers/JsonToTableRow.java | <reponame>doitintl/dataflow-bigquery-schema-migrator-insert
package com.doit.schemamigration.Parsers;
import com.google.api.services.bigquery.model.TableRow;
import com.owlike.genson.Genson;
import com.owlike.genson.JsonBindingException;
import com.owlike.genson.stream.JsonStreamException;
import java.util.HashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Converts a JSON object string into a BigQuery {@link TableRow}.
 *
 * <p>Parsing is best-effort: malformed or null input yields an empty row
 * (logged at DEBUG) rather than failing the pipeline bundle.
 */
public final class JsonToTableRow {
  static final Logger logger = LoggerFactory.getLogger(JsonToTableRow.class);

  /**
   * Deserialize {@code json} into a flat map and copy it into a TableRow.
   *
   * @param json a JSON object document; may be malformed or null
   * @return a TableRow with the parsed fields, or an empty TableRow on failure
   */
  public static TableRow convertFromString(final String json) {
    final TableRow out = new TableRow();
    final Genson genson = new Genson();
    logger.debug("Incoming jsonString: {}", json);
    try {
      final HashMap<String, Object> convertedObject = genson.deserialize(json, HashMap.class);
      out.putAll(convertedObject);
    } catch (JsonBindingException | JsonStreamException | NullPointerException e) {
      logger.debug("Failed to parse message:\n {}\nException thrown: {}", json, e);
    }
    // Pass the object itself: SLF4J only calls toString() when DEBUG is
    // enabled, whereas the previous out.toString() was computed eagerly.
    logger.debug("Outgoing tablerow: {}", out);
    return out;
  }
}
|
muthukumaravel7/armnn | Documentation/structarmnn_1_1_resolve_type_impl_3_01_data_type_1_1_boolean_01_4.js | <filename>Documentation/structarmnn_1_1_resolve_type_impl_3_01_data_type_1_1_boolean_01_4.js
// Doxygen-generated navigation data for the ResolveTypeImpl<DataType::Boolean>
// struct page; regenerated by the docs build — do not edit by hand.
var structarmnn_1_1_resolve_type_impl_3_01_data_type_1_1_boolean_01_4 =
[
    [ "Type", "structarmnn_1_1_resolve_type_impl_3_01_data_type_1_1_boolean_01_4.xhtml#a4ead9bff73e6b8e9843a264a3c9ef8f8", null ]
];
kreta/Kreta | CompositeUi/src/views/component/NavigableListItemLink.js | <filename>CompositeUi/src/views/component/NavigableListItemLink.js
/*
* This file is part of the Kreta package.
*
* (c) <NAME> <<EMAIL>>
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
import React from 'react';
import {Link} from 'react-router';
// List-item link that highlights itself when the keyboard-navigation
// context (xSelected) points at this item's index.
// NOTE(review): defaultProps.selected is never read (selection comes from
// context), and `index`/`to`/`children` are not declared in propTypes —
// confirm whether these should be cleaned up.
class NavigableListItemLink extends React.Component {
  static defaultProps = {
    selected: false,
  };
  static contextTypes = {
    xSelected: React.PropTypes.number,
  };
  render() {
    const {children, index, to} = this.props,
      // selected when the navigable-list context's cursor matches our index
      selected = this.context.xSelected === index;
    return (
      <Link
        className={`navigable-list-item-link ${selected
          ? ' navigable-list-item-link--selected'
          : ''}`}
        to={to}
      >
        {children}
      </Link>
    );
  }
}
|
ttrifonov/horizon | horizon/horizon/dashboards/syspanel/instances/tests.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import http
from django.core.urlresolvers import reverse
from mox import IsA
from novaclient import exceptions as novaclient_exceptions
from horizon import api
from horizon import test
class InstanceViewTest(test.BaseAdminViewTests):
    """Admin (syspanel) instances index view tests using mox-stubbed nova API."""

    def setUp(self):
        super(InstanceViewTest, self).setUp()
        # Minimal fake server/flavor fixtures; only the attributes read by
        # the view/table are populated.
        self.server = api.Server(None, self.request)
        self.server.id = 1
        self.server.name = 'serverName'
        self.server.status = "ACTIVE"
        self.server.flavor = {'id': '1'}
        self.flavor = api.nova.Flavor(None)
        self.flavor.id = '1'
        self.flavor.ram = 512
        self.flavor.vcpus = 512
        self.flavor.disk = 1
        self.servers = (self.server,)
        self.flavors = (self.flavor,)

    def test_index(self):
        """Index lists all-tenant servers when both API calls succeed."""
        self.mox.StubOutWithMock(api.nova, 'server_list')
        self.mox.StubOutWithMock(api.nova, 'flavor_list')
        api.nova.server_list(IsA(http.HttpRequest),
                             all_tenants=True).AndReturn(self.servers)
        api.nova.flavor_list(IsA(http.HttpRequest)).AndReturn(self.flavors)
        self.mox.ReplayAll()
        res = self.client.get(reverse('horizon:syspanel:instances:index'))
        self.assertTemplateUsed(res, 'syspanel/instances/index.html')
        instances = res.context['table'].data
        self.assertItemsEqual(instances, self.servers)

    def test_index_server_list_exception(self):
        """Index renders an empty table when server_list raises."""
        self.mox.StubOutWithMock(api.nova, 'server_list')
        self.mox.StubOutWithMock(api.nova, 'flavor_list')
        exception = novaclient_exceptions.ClientException('apiException')
        api.nova.server_list(IsA(http.HttpRequest),
                             all_tenants=True).AndRaise(exception)
        self.mox.ReplayAll()
        res = self.client.get(reverse('horizon:syspanel:instances:index'))
        self.assertTemplateUsed(res, 'syspanel/instances/index.html')
        # NOTE(review): this test reads res.context['instances_table'] while
        # test_index reads res.context['table'] — confirm which key the view
        # actually exposes; one of the two may be stale.
        self.assertEqual(len(res.context['instances_table'].data), 0)
|
erichuang1994/leetcode-solution | 901-1000/997. Find the Town Judge.cpp | <reponame>erichuang1994/leetcode-solution
class Solution
{
public:
    // Returns the label of the town judge, or -1 if no judge exists.
    // The judge is trusted by all N - 1 other people and trusts nobody.
    int findJudge(int N, vector<vector<int>> &trust)
    {
        // trustedBy[p]: how many people trust p; trusts[p]: how many people p trusts.
        vector<int> trustedBy(N + 1, 0);
        vector<int> trusts(N + 1, 0);
        for (size_t k = 0; k < trust.size(); ++k)
        {
            ++trusts[trust[k][0]];
            ++trustedBy[trust[k][1]];
        }
        for (int person = 1; person <= N; ++person)
        {
            // The judge is trusted by everyone else and trusts no one.
            if (trustedBy[person] == N - 1 && trusts[person] == 0)
                return person;
        }
        return -1;
    }
};
charithe/beam | learning/katas/java/Windowing/Fixed Time Window/Fixed Time Window/test/org/apache/beam/learning/katas/windowing/fixedwindow/WindowedEvent.java | <reponame>charithe/beam
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.learning.katas.windowing.fixedwindow;
import java.io.Serializable;
import java.util.Objects;
public class WindowedEvent implements Serializable {
private String event;
private Long count;
private String window;
public WindowedEvent(String event, Long count, String window) {
this.event = event;
this.count = count;
this.window = window;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
WindowedEvent that = (WindowedEvent) o;
return event.equals(that.event) &&
count.equals(that.count) &&
window.equals(that.window);
}
@Override
public int hashCode() {
return Objects.hash(event, count, window);
}
@Override
public String toString() {
return "WindowedEvent{" +
"event='" + event + '\'' +
", count=" + count +
", window='" + window + '\'' +
'}';
}
}
|
kuzhamuratov/deep-landscape | superres/src/models/srgan.py | <filename>superres/src/models/srgan.py<gh_stars>10-100
import logging
from collections import OrderedDict
import torch
import torch.nn as nn
from torch.nn.parallel import DataParallel, DistributedDataParallel
import models.networks as networks
import models.lr_scheduler as lr_scheduler
from .base_model import BaseModel
from models.modules.loss import GANLoss, LMaskLoss
logger = logging.getLogger('base')
class SRGANModel(BaseModel):
    """SRGAN training/inference wrapper.

    Owns the generator G, the image discriminator D, an optional video
    discriminator (enabled when train.gan_video_weight > 0), the pixel /
    feature / GAN losses, their optimizers and LR schedulers, plus logging
    and (de)serialization helpers.
    """
    def __init__(self, opt):
        """Build networks, losses, optimizers and schedulers from the ``opt`` dict."""
        super(SRGANModel, self).__init__(opt)
        if opt['dist']:
            self.rank = torch.distributed.get_rank()
        else:
            self.rank = -1  # non dist training
        train_opt = opt['train']
        self.train_opt = train_opt
        self.opt = opt
        # Lazily constructed in optimize_parameters() when the masked
        # discriminator variant is configured.
        self.segmentor = None
        # define networks and load pretrained models
        self.netG = networks.define_G(opt).to(self.device)
        if opt['dist']:
            self.netG = DistributedDataParallel(self.netG, device_ids=[torch.cuda.current_device()])
        else:
            self.netG = DataParallel(self.netG)
        if self.is_train:
            self.netD = networks.define_D(opt).to(self.device)
            if train_opt.get("gan_video_weight", 0) > 0:
                self.net_video_D = networks.define_video_D(opt).to(self.device)
            if opt['dist']:
                self.netD = DistributedDataParallel(self.netD,
                                                    device_ids=[torch.cuda.current_device()])
                if train_opt.get("gan_video_weight", 0) > 0:
                    self.net_video_D = DistributedDataParallel(self.net_video_D,
                                                               device_ids=[torch.cuda.current_device()])
            else:
                self.netD = DataParallel(self.netD)
                if train_opt.get("gan_video_weight", 0) > 0:
                    self.net_video_D = DataParallel(self.net_video_D)
            self.netG.train()
            self.netD.train()
            if train_opt.get("gan_video_weight", 0) > 0:
                self.net_video_D.train()
        # define losses, optimizer and scheduler
        if self.is_train:
            # G pixel loss
            if train_opt['pixel_weight'] > 0:
                l_pix_type = train_opt['pixel_criterion']
                if l_pix_type == 'l1':
                    self.cri_pix = nn.L1Loss().to(self.device)
                elif l_pix_type == 'l2':
                    self.cri_pix = nn.MSELoss().to(self.device)
                else:
                    raise NotImplementedError('Loss type [{:s}] not recognized.'.format(l_pix_type))
                self.l_pix_w = train_opt['pixel_weight']
            else:
                logger.info('Remove pixel loss.')
                self.cri_pix = None
            # Pixel mask loss
            if train_opt.get("pixel_mask_weight", 0) > 0:
                l_pix_type = train_opt['pixel_mask_criterion']
                self.cri_pix_mask = LMaskLoss(l_pix_type=l_pix_type, segm_mask=train_opt['segm_mask']).to(self.device)
                self.l_pix_mask_w = train_opt['pixel_mask_weight']
            else:
                logger.info('Remove pixel mask loss.')
                self.cri_pix_mask = None
            # G feature loss
            if train_opt['feature_weight'] > 0:
                l_fea_type = train_opt['feature_criterion']
                if l_fea_type == 'l1':
                    self.cri_fea = nn.L1Loss().to(self.device)
                elif l_fea_type == 'l2':
                    self.cri_fea = nn.MSELoss().to(self.device)
                else:
                    raise NotImplementedError('Loss type [{:s}] not recognized.'.format(l_fea_type))
                self.l_fea_w = train_opt['feature_weight']
            else:
                logger.info('Remove feature loss.')
                self.cri_fea = None
            if self.cri_fea:  # load VGG perceptual loss
                self.netF = networks.define_F(opt, use_bn=False).to(self.device)
                if opt['dist']:
                    self.netF = DistributedDataParallel(self.netF,
                                                        device_ids=[torch.cuda.current_device()])
                else:
                    self.netF = DataParallel(self.netF)
            # GD gan loss
            self.cri_gan = GANLoss(train_opt['gan_type'], 1.0, 0.0).to(self.device)
            self.l_gan_w = train_opt['gan_weight']
            # Video gan weight
            if train_opt.get("gan_video_weight", 0) > 0:
                self.cri_video_gan = GANLoss(train_opt['gan_video_type'], 1.0, 0.0).to(self.device)
                self.l_gan_video_w = train_opt['gan_video_weight']
                # can't use optical flow with i and i+1 because we need i+2 lr to calculate i+1 oflow
                if 'train' in self.opt['datasets'].keys():
                    key = "train"
                else:
                    key = 'test_1'
                assert self.opt['datasets'][key]['optical_flow_with_ref'] == True, f"Current value = {self.opt['datasets'][key]['optical_flow_with_ref']}"
            # D_update_ratio and D_init_iters
            self.D_update_ratio = train_opt['D_update_ratio'] if train_opt['D_update_ratio'] else 1
            self.D_init_iters = train_opt['D_init_iters'] if train_opt['D_init_iters'] else 0
            # optimizers
            # G
            wd_G = train_opt['weight_decay_G'] if train_opt['weight_decay_G'] else 0
            optim_params = []
            for k, v in self.netG.named_parameters():  # can optimize for a part of the model
                if v.requires_grad:
                    optim_params.append(v)
                else:
                    if self.rank <= 0:
                        logger.warning('Params [{:s}] will not optimize.'.format(k))
            self.optimizer_G = torch.optim.Adam(optim_params, lr=train_opt['lr_G'],
                                                weight_decay=wd_G,
                                                betas=(train_opt['beta1_G'], train_opt['beta2_G']))
            self.optimizers.append(self.optimizer_G)
            # D
            wd_D = train_opt['weight_decay_D'] if train_opt['weight_decay_D'] else 0
            self.optimizer_D = torch.optim.Adam(self.netD.parameters(), lr=train_opt['lr_D'],
                                                weight_decay=wd_D,
                                                betas=(train_opt['beta1_D'], train_opt['beta2_D']))
            self.optimizers.append(self.optimizer_D)
            # Video D
            if train_opt.get("gan_video_weight", 0) > 0:
                self.optimizer_video_D = torch.optim.Adam(self.net_video_D.parameters(), lr=train_opt['lr_D'],
                                                          weight_decay=wd_D,
                                                          betas=(train_opt['beta1_D'], train_opt['beta2_D']))
                self.optimizers.append(self.optimizer_video_D)
            # schedulers
            if train_opt['lr_scheme'] == 'MultiStepLR':
                for optimizer in self.optimizers:
                    self.schedulers.append(
                        lr_scheduler.MultiStepLR_Restart(optimizer, train_opt['lr_steps'],
                                                         restarts=train_opt['restarts'],
                                                         weights=train_opt['restart_weights'],
                                                         gamma=train_opt['lr_gamma'],
                                                         clear_state=train_opt['clear_state']))
            elif train_opt['lr_scheme'] == 'CosineAnnealingLR_Restart':
                for optimizer in self.optimizers:
                    self.schedulers.append(
                        lr_scheduler.CosineAnnealingLR_Restart(
                            optimizer, train_opt['T_period'], eta_min=train_opt['eta_min'],
                            restarts=train_opt['restarts'], weights=train_opt['restart_weights']))
            else:
                raise NotImplementedError('MultiStepLR learning rate scheme is enough.')
        self.log_dict = OrderedDict()
        self.print_network()  # print network
        self.load()  # load G and D if needed
    def feed_data(self, data, need_GT=True):
        """Move one batch from the loader dict onto the model device.

        Populates var_L (LQ input), and optionally var_H (GT), var_HR_ref
        (reference frame), var_L_next / var_H_next (following frame for the
        video discriminator) and var_video_H (GT pair stacked on a new time
        axis). var_L_next is reset to None when the batch has no next frame.
        """
        self.img_path = data['GT_path']
        self.var_L = data['LQ'].to(self.device)  # LQ
        if need_GT:
            self.var_H = data['GT'].to(self.device)  # GT
        if self.train_opt.get("use_HR_ref"):
            self.var_HR_ref = data['img_reference'].to(self.device)
        if "LQ_next" in data.keys():
            self.var_L_next = data['LQ_next'].to(self.device)
            if "GT_next" in data.keys():
                self.var_H_next = data['GT_next'].to(self.device)
                self.var_video_H = torch.cat([data['GT'].unsqueeze(2), data['GT_next'].unsqueeze(2)], dim=2).to(self.device)
        else:
            self.var_L_next = None
    def optimize_parameters(self, step):
        """Run one G step (every D_update_ratio steps, after D_init_iters) and one D step."""
        # G
        # Freeze D while optimizing G so D gets no gradients from the G loss.
        for p in self.netD.parameters():
            p.requires_grad = False
        self.optimizer_G.zero_grad()
        args = [self.var_L]
        if self.train_opt.get('use_HR_ref'):
            args += [self.var_HR_ref]
        if self.var_L_next is not None:
            args += [self.var_L_next]
        self.fake_H, self.binary_mask = self.netG(*args)
        #Video Gan
        if self.opt['train'].get("gan_video_weight", 0) > 0:
            # The next-frame prediction only feeds the video discriminator,
            # so it is generated without tracking gradients.
            with torch.no_grad():
                args = [self.var_L, self.var_HR_ref, self.var_L_next]
                self.fake_H_next, self.binary_mask_next = self.netG(*args)
        l_g_total = 0
        if step % self.D_update_ratio == 0 and step > self.D_init_iters:
            if self.cri_pix:  # pixel loss
                l_g_pix = self.l_pix_w * self.cri_pix(self.fake_H, self.var_H)
                l_g_total += l_g_pix
            if self.cri_pix_mask:
                l_g_pix_mask = self.l_pix_mask_w * self.cri_pix_mask(self.fake_H, self.var_H, self.var_HR_ref)
                l_g_total += l_g_pix_mask
            if self.cri_fea:  # feature loss
                real_fea = self.netF(self.var_H).detach()
                fake_fea = self.netF(self.fake_H)
                l_g_fea = self.l_fea_w * self.cri_fea(fake_fea, real_fea)
                l_g_total += l_g_fea
            # Image Gan
            if self.opt['network_D'] == "discriminator_vgg_128_mask":
                import torch.nn.functional as F
                from models.modules import psina_seg
                if self.segmentor is None:
                    # Built on first use; kept in eval mode (inference only).
                    self.segmentor = psina_seg.base.SegmentationModule(encode='stationary_probs').to(self.device)
                    self.segmentor = self.segmentor.eval()
                lr = F.interpolate(self.var_H, scale_factor=0.25, mode='nearest')
                with torch.no_grad():
                    # NOTE(review): lr[:, [2,1,0],::] looks like a BGR<->RGB
                    # channel swap for the segmentor -- confirm.
                    binary_mask = (1 - self.segmentor.predict(lr[:, [2,1,0],::]))
                binary_mask = F.interpolate(binary_mask, scale_factor=4, mode='nearest')
                pred_g_fake = self.netD(self.fake_H, self.fake_H *(1-binary_mask), self.var_HR_ref, binary_mask * self.var_HR_ref)
            else:
                pred_g_fake = self.netD(self.fake_H)
            if self.opt['train']['gan_type'] == 'gan':
                l_g_gan = self.l_gan_w * self.cri_gan(pred_g_fake, True)
            elif self.opt['train']['gan_type'] == 'ragan':
                if self.opt['network_D'] == "discriminator_vgg_128_mask":
                    pred_g_fake = self.netD(self.var_H, self.var_H *(1-binary_mask), self.var_HR_ref, binary_mask * self.var_HR_ref)
                else:
                    pred_d_real = self.netD(self.var_H)
                pred_d_real = pred_d_real.detach()
                # Relativistic average GAN loss for G.
                l_g_gan = self.l_gan_w * (
                    self.cri_gan(pred_d_real - torch.mean(pred_g_fake), False) +
                    self.cri_gan(pred_g_fake - torch.mean(pred_d_real), True)) / 2
            l_g_total += l_g_gan
            #Video Gan
            if self.opt['train'].get("gan_video_weight", 0) > 0:
                self.fake_video_H = torch.cat([self.fake_H.unsqueeze(2), self.fake_H_next.unsqueeze(2)], dim=2)
                pred_g_video_fake = self.net_video_D(self.fake_video_H)
                if self.opt['train']['gan_video_type'] == 'gan':
                    l_g_video_gan = self.l_gan_video_w * self.cri_video_gan(pred_g_video_fake, True)
                # NOTE(review): this branch tests 'gan_type', not
                # 'gan_video_type' -- if the two settings differ the video
                # ragan loss is selected by the image GAN type; confirm intended.
                elif self.opt['train']['gan_type'] == 'ragan':
                    pred_d_video_real = self.net_video_D(self.var_video_H)
                    pred_d_video_real = pred_d_video_real.detach()
                    l_g_video_gan = self.l_gan_video_w * (
                        self.cri_video_gan(pred_d_video_real - torch.mean(pred_g_video_fake), False) +
                        self.cri_video_gan(pred_g_video_fake - torch.mean(pred_d_video_real), True)) / 2
                l_g_total += l_g_video_gan
            # OFLOW regular
            # L1 sparsity regularizer (weight 1) on the predicted binary mask.
            if self.binary_mask is not None:
                l_g_total += 1* self.binary_mask.mean()
            l_g_total.backward()
            self.optimizer_G.step()
        # D
        for p in self.netD.parameters():
            p.requires_grad = True
        if self.opt['train'].get("gan_video_weight", 0) > 0:
            for p in self.net_video_D.parameters():
                p.requires_grad = True
        # optimize Image D
        self.optimizer_D.zero_grad()
        l_d_total = 0
        pred_d_real = self.netD(self.var_H)
        pred_d_fake = self.netD(self.fake_H.detach())  # detach to avoid BP to G
        if self.opt['train']['gan_type'] == 'gan':
            l_d_real = self.cri_gan(pred_d_real, True)
            l_d_fake = self.cri_gan(pred_d_fake, False)
            l_d_total = l_d_real + l_d_fake
        elif self.opt['train']['gan_type'] == 'ragan':
            l_d_real = self.cri_gan(pred_d_real - torch.mean(pred_d_fake), True)
            l_d_fake = self.cri_gan(pred_d_fake - torch.mean(pred_d_real), False)
            l_d_total = (l_d_real + l_d_fake) / 2
        l_d_total.backward()
        self.optimizer_D.step()
        # optimize Video D
        if self.opt['train'].get("gan_video_weight", 0) > 0:
            self.optimizer_video_D.zero_grad()
            l_d_video_total = 0
            pred_d_video_real = self.net_video_D(self.var_video_H)
            pred_d_video_fake = self.net_video_D(self.fake_video_H.detach())  # detach to avoid BP to G
            if self.opt['train']['gan_video_type'] == 'gan':
                l_d_video_real = self.cri_video_gan(pred_d_video_real, True)
                l_d_video_fake = self.cri_video_gan(pred_d_video_fake, False)
                l_d_video_total = l_d_video_real + l_d_video_fake
            elif self.opt['train']['gan_video_type'] == 'ragan':
                l_d_video_real = self.cri_video_gan(pred_d_video_real - torch.mean(pred_d_video_fake), True)
                l_d_video_fake = self.cri_video_gan(pred_d_video_fake - torch.mean(pred_d_video_real), False)
                l_d_video_total = (l_d_video_real + l_d_video_fake) / 2
            l_d_video_total.backward()
            self.optimizer_video_D.step()
        # set log
        if step % self.D_update_ratio == 0 and step > self.D_init_iters:
            if self.cri_pix:
                self.log_dict['l_g_pix'] = l_g_pix.item()
            if self.cri_fea:
                self.log_dict['l_g_fea'] = l_g_fea.item()
            self.log_dict['l_g_gan'] = l_g_gan.item()
        self.log_dict['l_d_real'] = l_d_real.item()
        self.log_dict['l_d_fake'] = l_d_fake.item()
        self.log_dict['D_real'] = torch.mean(pred_d_real.detach())
        self.log_dict['D_fake'] = torch.mean(pred_d_fake.detach())
        if self.opt['train'].get("gan_video_weight", 0) > 0:
            self.log_dict['D_video_real'] = torch.mean(pred_d_video_real.detach())
            self.log_dict['D_video_fake'] = torch.mean(pred_d_video_fake.detach())
    def test(self):
        """Run a forward pass in eval mode (no gradients); results land in fake_H / binary_mask."""
        self.netG.eval()
        with torch.no_grad():
            args = [self.var_L]
            if self.train_opt.get('use_HR_ref'):
                args += [self.var_HR_ref]
            if self.var_L_next is not None:
                args += [self.var_L_next]
            self.fake_H, self.binary_mask = self.netG(*args)
        self.netG.train()
    def get_current_log(self):
        """Return the OrderedDict of scalars recorded by the last optimize_parameters call."""
        return self.log_dict
    def get_current_visuals(self, need_GT=True):
        """Return first-sample CPU float tensors for LQ, SR, optional mask and GT."""
        out_dict = OrderedDict()
        out_dict['LQ'] = self.var_L.detach()[0].float().cpu()
        out_dict['SR'] = self.fake_H.detach()[0].float().cpu()
        if self.binary_mask is not None:
            out_dict['binary_mask'] = self.binary_mask.detach()[0].float().cpu()
        if need_GT:
            out_dict['GT'] = self.var_H.detach()[0].float().cpu()
        return out_dict
    def print_network(self):
        """Log structure and parameter counts of G (and D / F when training), rank 0 only."""
        # Generator
        s, n = self.get_network_description(self.netG)
        if isinstance(self.netG, nn.DataParallel) or isinstance(self.netG, DistributedDataParallel):
            net_struc_str = '{} - {}'.format(self.netG.__class__.__name__,
                                             self.netG.module.__class__.__name__)
        else:
            net_struc_str = '{}'.format(self.netG.__class__.__name__)
        if self.rank <= 0:
            logger.info('Network G structure: {}, with parameters: {:,d}'.format(net_struc_str, n))
            logger.info(s)
        if self.is_train:
            # Discriminator
            s, n = self.get_network_description(self.netD)
            if isinstance(self.netD, nn.DataParallel) or isinstance(self.netD,
                                                                    DistributedDataParallel):
                net_struc_str = '{} - {}'.format(self.netD.__class__.__name__,
                                                 self.netD.module.__class__.__name__)
            else:
                net_struc_str = '{}'.format(self.netD.__class__.__name__)
            if self.rank <= 0:
                logger.info('Network D structure: {}, with parameters: {:,d}'.format(
                    net_struc_str, n))
                logger.info(s)
            if self.cri_fea:  # F, Perceptual Network
                s, n = self.get_network_description(self.netF)
                if isinstance(self.netF, nn.DataParallel) or isinstance(
                        self.netF, DistributedDataParallel):
                    net_struc_str = '{} - {}'.format(self.netF.__class__.__name__,
                                                     self.netF.module.__class__.__name__)
                else:
                    net_struc_str = '{}'.format(self.netF.__class__.__name__)
                if self.rank <= 0:
                    logger.info('Network F structure: {}, with parameters: {:,d}'.format(
                        net_struc_str, n))
                    logger.info(s)
    def load(self):
        """Load pretrained weights for G (and D / video D when training) per opt['path']."""
        # G
        load_path_G = self.opt['path']['pretrain_model_G']
        if load_path_G is not None:
            logger.info('Loading model for G [{:s}] ...'.format(load_path_G))
            self.load_network(load_path_G, self.netG, self.opt['path']['pretrain_model_G_strict_load'])
        if self.opt['network_G'].get("pretrained_net") is not None:
            self.netG.module.load_pretrained_net_weights(self.opt['network_G']['pretrained_net'])
        # D
        load_path_D = self.opt['path']['pretrain_model_D']
        if self.opt['is_train'] and load_path_D is not None:
            logger.info('Loading model for D [{:s}] ...'.format(load_path_D))
            self.load_network(load_path_D, self.netD, self.opt['path']['pretrain_model_D_strict_load'])
        # Video D
        if self.opt['train'].get("gan_video_weight", 0) > 0:
            load_path_video_D = self.opt['path'].get("pretrain_model_video_D")
            if self.opt['is_train'] and load_path_video_D is not None:
                self.load_network(load_path_video_D, self.net_video_D, self.opt['path']['pretrain_model_video_D_strict_load'])
    def save(self, iter_step):
        """Checkpoint G, D and (when enabled) the video D at the given iteration."""
        self.save_network(self.netG, 'G', iter_step)
        self.save_network(self.netD, 'D', iter_step)
        if self.opt['train'].get("gan_video_weight", 0) > 0:
            self.save_network(self.net_video_D, 'video_D', iter_step)
    @staticmethod
    def _freeze_net(network):
        # Disable gradients for every parameter; returns the same network.
        for p in network.parameters():
            p.requires_grad = False
        return network
    @staticmethod
    def _unfreeze_net(network):
        # Re-enable gradients for every parameter; returns the same network.
        for p in network.parameters():
            p.requires_grad = True
        return network
    def freeze(self, G, D):
        """Freeze the generator core and/or the discriminator (flags select which)."""
        if G:
            self.netG.module.net = self._freeze_net(self.netG.module.net)
        if D:
            self.netD.module = self._freeze_net(self.netD.module)
    def unfreeze(self, G, D):
        """Reverse of freeze(): re-enable gradients for the selected networks."""
        if G:
            self.netG.module.net = self._unfreeze_net(self.netG.module.net)
        if D:
            self.netD.module = self._unfreeze_net(self.netD.module)
|
VolgaCTF/volgactf-qualifier-backend | src/controllers/mail/smtp.js | const logger = require('../../utils/logger')
const nodemailer = require('nodemailer')
class SMTPController {
  // Delivers one message over SMTP using settings taken from the environment.
  // Resolves with nodemailer's delivery info, rejects (after logging) on error.
  static sendEmail (message, recipientEmail, recipientName, messageId) {
    return new Promise(function (resolve, reject) {
      // Transport settings come entirely from the environment.
      const transportOptions = {
        host: process.env.SMTP_HOST,
        port: parseInt(process.env.SMTP_PORT, 10),
        secure: process.env.SMTP_SECURE === 'yes',
        auth: {
          user: process.env.SMTP_USERNAME,
          pass: process.env.SMTP_PASSWORD
        }
      }

      // Optional extra headers, plus a tracking id for this message.
      const customHeaders = JSON.parse(process.env.SMTP_HEADERS_JSON || '{}')
      customHeaders['X-VolgaCTF-Qualifier-Message-Id'] = messageId

      const mailData = {
        from: `${process.env.VOLGACTF_QUALIFIER_EMAIL_SENDER_NAME} <${process.env.VOLGACTF_QUALIFIER_EMAIL_SENDER_ADDRESS}>`,
        to: `${recipientName} <${recipientEmail}>`,
        subject: message.subject,
        text: message.plain,
        html: message.html,
        headers: customHeaders
      }

      nodemailer.createTransport(transportOptions).sendMail(mailData, function (err, info) {
        if (err) {
          logger.error(err)
          reject(err)
          return
        }
        resolve(info)
      })
    })
  }
}

module.exports = SMTPController
|
willmexe/opuntiaOS | kernel/include/mem/bits/zone.h | /*
* Copyright (C) 2020-2022 The opuntiaOS Project Authors.
* + Contributed by <NAME> <<EMAIL>>
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef _KERNEL_MEM_BITS_ZONE_H
#define _KERNEL_MEM_BITS_ZONE_H
#include <mem/bits/mmu.h>
/* Access/attribute flags of a memory zone. Each value aliases the matching
 * MMU flag, so zone flags can be passed to the MMU layer without translation. */
enum ZONE_FLAGS {
    ZONE_WRITABLE = MMU_FLAG_PERM_WRITE,
    ZONE_READABLE = MMU_FLAG_PERM_READ,
    ZONE_EXECUTABLE = MMU_FLAG_PERM_EXEC,
    ZONE_NOT_CACHEABLE = MMU_FLAG_UNCACHED,
    ZONE_COW = MMU_FLAG_COW,
    ZONE_USER = MMU_FLAG_NONPRIV,
};
/* What a zone holds. Values are distinct bits, so they can be combined
 * as a mask as well as used individually. */
enum ZONE_TYPES {
    ZONE_TYPE_NULL = 0x0,
    ZONE_TYPE_CODE = 0x1,
    ZONE_TYPE_DATA = 0x2,
    ZONE_TYPE_STACK = 0x4,
    ZONE_TYPE_BSS = 0x8,
    ZONE_TYPE_DEVICE = 0x10,
    ZONE_TYPE_MAPPED = 0x20,
    ZONE_TYPE_MAPPED_FILE_PRIVATLY = 0x40,
    ZONE_TYPE_MAPPED_FILE_SHAREDLY = 0x80,
};
#endif // _KERNEL_MEM_BITS_ZONE_H
hrajput89/kv_engine | include/cbsasl/logging.h | /* -*- Mode: C++; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
* Copyright 2017 Couchbase, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <string>
namespace cb {
namespace sasl {
// Forward decl
class Context;
namespace logging {
enum class Level {
    /**
     * Log an error situation. Entries logged at this level contains a
     * UUID if it is bound to a connection.
     */
    Error,
    /**
     * The log message is for an authentication failure
     */
    Fail,
    /**
     * This is a non-fatal warning
     */
    Warning,
    /**
     * An informational message produced by the library
     */
    Notice,
    /**
     * Debug message
     */
    Debug,
    /**
     * Trace of internal protocol
     */
    Trace
};
/**
 * The log callback method the user of the library may configure. It
 * is called every time with the message to add to the log if the level
 * is enabled.
 */
using LogCallback = void (*)(Level level, const std::string& message);
/**
 * Specify the callback function to use for logging
 */
void set_log_callback(LogCallback callback);
/**
 * Perform logging within the CBSASL library for components that aren't bound
 * to a given client.
 *
 * @param level severity of the entry
 * @param message the preformatted text to log
 */
void log(Level level, const std::string& message);
/**
 * Perform logging related to a given client.
 *
 * @param server the per-connection context the entry belongs to
 * @param level severity of the entry
 * @param message the preformatted text to log
 */
void log(Context* server, Level level, const std::string& message);
} // namespace logging
} // namespace sasl
} // namespace cb
|
vladimirg-dev/lazybucks-cookie-manager | spec/ui.spec.js | <reponame>vladimirg-dev/lazybucks-cookie-manager<filename>spec/ui.spec.js<gh_stars>10-100
"use strict"
const Promise = require ("bluebird")
const Browser = require ("../utils/Browser")
const assert = require ("assert")
describe ( "User Interface", function () {
    // Headless-browser runs can be slow; allow up to 200s per test.
    this.timeout ( 2E5 )

    describe ( "Search", function () {
        it ( "Can type in search box", function () {
            return Browser ( async chrome => {
                await chrome.goto ( "index.html" )
                await chrome.page.type ( "#search", "Hello World" )
                const searchBox = await chrome.page.$( "#search" )
                assert.ok ( searchBox, "Could not type in search box" )
                await chrome.close ()
            })
        })
        // it ( "Can search with case-sensitive regular search", () => assert.equal ( true, false ) )
        // it ( "Can search with case-insensitive regular search", () => assert.equal ( true, false ) )
        // it ( "Can search with case-sensitive regexp search", () => assert.equal ( true, false ) )
        // it ( "Can search with case-insensitive regexp search", () => assert.equal ( true, false ) )
    })

    // describe ( "Cookies", function () {
    //     it ( "Will disappears when expires", function () {})
    //     it ( "Will export cookies correctly", function () {})
    //     it ( "Will import cookies correctly", function () {})
    // })
    //
    // describe ( "Create", function () {
    //     it ( "", function () {})
    // })
    //
    // describe ( "Delete", function () {
    //     it ( "", function () {})
    // })
    //
    // describe ( "Protect", function () {
    //     it ( "", function () {})
    // })
    //
    // describe ( "Block", function () {
    //     it ( "", function () {})
    // })
})
|
alexeyz041/toolbox | rtsp-streamer/source.h | <filename>rtsp-streamer/source.h
#ifndef _SOURCE_H
#define _SOURCE_H
#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include <deque>
#ifdef USE_X264
#include "encoder.h"
#else
#include "encoder2.h"
#endif
// Video format description handed to Source::createNew().
class SourceParameters {
public:
    SourceParameters(int w,int h,int fps) : width(w), height(h), fps(fps) {}
    int width;   // frame width in pixels
    int height;  // frame height in pixels
    int fps;     // frames per second
};
// Owning byte buffer with a capture timestamp. Holds a heap copy of the
// source bytes; buf is nullptr if the allocation failed (or after a move).
class Buffer {
public:
    Buffer(Buffer &b) : Buffer(b.buf, b.len, b.ts)
    {
    }

    Buffer(const Buffer &b) : Buffer(b.buf, b.len, b.ts)
    {
    }

    // Deep-copies len bytes starting at b; keeps ts as the frame timestamp.
    Buffer(uint8_t *b, int len, uint64_t ts) : len(len), ts(ts)
    {
        buf = (uint8_t *)malloc(len);
        // Guard both pointers: malloc may fail, and a moved-from source has b == nullptr.
        if (buf && b) {
            memcpy(buf, b, len);
        }
    }

    // Move construction: steal ownership instead of copying (cheap growth
    // inside std::deque<Buffer>).
    Buffer(Buffer &&b) noexcept : buf(b.buf), len(b.len), ts(b.ts)
    {
        b.buf = nullptr;
        b.len = 0;
    }

    // BUGFIX (rule of three): the class had a destructor and copy
    // constructors but no copy assignment, so the compiler-generated
    // assignment copied the raw pointer and caused a double free.
    // Copy-and-swap handles both copy- and move-assignment safely.
    Buffer &operator=(Buffer b) noexcept
    {
        std::swap(buf, b.buf);
        std::swap(len, b.len);
        std::swap(ts, b.ts);
        return *this;
    }

    ~Buffer()
    {
        if (buf) {
            free(buf);
            buf = nullptr;
        }
    }

public:
    uint8_t *buf;   // owned heap copy of the payload (nullptr if empty/failed)
    int len;        // payload length in bytes
    uint64_t ts;    // capture timestamp supplied by the producer
};
#ifdef USE_X264
// live555 frame source fed by the x264-based Encoder.
class Source: public FramedSource, public Encoder {
#else
// live555 frame source fed by the alternative Encoder2 implementation.
class Source: public FramedSource, public Encoder2 {
#endif
public:
    // Factory entry point used by the RTSP server setup code.
    static Source* createNew(UsageEnvironment& env, SourceParameters params);

public:
    // Trigger used to signal the live555 event loop that a new encoded
    // frame is available in the fifo.
    static EventTriggerId eventTriggerId;
    // Note that this is defined here to be a static class variable, because this code is intended to illustrate how to
    // encapsulate a *single* device - not a set of devices.
    // You can, however, redefine this to be a non-static member variable.

protected:
    Source(UsageEnvironment& env, SourceParameters params);
    // called only by createNew(), or by subclass constructors
    virtual ~Source();

private:
    // redefined virtual functions:
    virtual void doGetNextFrame();
    //virtual void doStopGettingFrames(); // optional
    // Encoder callback: queues one encoded frame (copied into the fifo).
    virtual void write(uint8_t *buf,int len,uint64_t ts);

private:
    static void deliverFrame0(void* clientData);
    void deliverFrame();

private:
    static unsigned referenceCount; // used to count how many instances of this class currently exist
    SourceParameters fParams;       // capture format this source was created with
    std::deque<Buffer> fifo;        // encoded frames awaiting delivery to live555
    bool bNeedEvent;                // true while the sink is waiting for a frame
    struct timeval ptime;           // presentation time of the last delivered frame
};
#endif //_SOURCE_H
|
vjpr/swc | ecmascript/minifier/tests/terser/compress/typeof/duplicate_lambda_arg_name/output.terser.js | <reponame>vjpr/swc
// NOTE(review): expected-output fixture for a terser compression test. The
// IIFE's parameter shadows the function's own name and the call passes no
// arguments, so `typeof long_name` evaluates to "undefined". If the test
// harness compares this file byte-for-byte, this comment must be dropped.
console.log(
    (function long_name(long_name) {
        return typeof long_name;
    })()
);
|
Andreas237/AndroidPolicyAutomation | ExtractedJars/RT_News_com.rt.mobile.english/javafiles/com/google/android/gms/internal/ads/zziy.java | // Decompiled by Jad v1.5.8g. Copyright 2001 <NAME>.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) annotate safe
package com.google.android.gms.internal.ads;
import java.io.IOException;
// Referenced classes of package com.google.android.gms.internal.ads:
// zzbfc, zzbez, zziw, zzia,
// zzbfa, zzbfi
public final class zziy extends zzbfc
{
public zziy()
{
// 0 0:aload_0
// 1 1:invokespecial #16 <Method void zzbfc()>
zzanu = null;
// 2 4:aload_0
// 3 5:aconst_null
// 4 6:putfield #18 <Field Integer zzanu>
zzapn = null;
// 5 9:aload_0
// 6 10:aconst_null
// 7 11:putfield #20 <Field zziw zzapn>
zzape = null;
// 8 14:aload_0
// 9 15:aconst_null
// 10 16:putfield #22 <Field Integer zzape>
zzapf = null;
// 11 19:aload_0
// 12 20:aconst_null
// 13 21:putfield #24 <Field Integer zzapf>
zzapr = null;
// 14 24:aload_0
// 15 25:aconst_null
// 16 26:putfield #26 <Field Integer zzapr>
zzebk = null;
// 17 29:aload_0
// 18 30:aconst_null
// 19 31:putfield #30 <Field zzbfe zzebk>
zzebt = -1;
// 20 34:aload_0
// 21 35:iconst_m1
// 22 36:putfield #34 <Field int zzebt>
// 23 39:return
}
private final zziy zzv(zzbez zzbez1)
throws IOException
{
IllegalArgumentException illegalargumentexception;
do
{
int i = zzbez1.zzabk();
// 0 0:aload_1
// 1 1:invokevirtual #47 <Method int zzbez.zzabk()>
// 2 4:istore_2
if(i == 0)
break;
// 3 5:iload_2
// 4 6:ifeq 158
if(i != 8)
//* 5 9:iload_2
//* 6 10:bipush 8
//* 7 12:icmpeq 121
{
if(i != 18)
//* 8 15:iload_2
//* 9 16:bipush 18
//* 10 18:icmpeq 92
{
if(i != 24)
//* 11 21:iload_2
//* 12 22:bipush 24
//* 13 24:icmpeq 78
{
if(i != 32)
//* 14 27:iload_2
//* 15 28:bipush 32
//* 16 30:icmpeq 64
{
if(i != 40)
//* 17 33:iload_2
//* 18 34:bipush 40
//* 19 36:icmpeq 50
{
if(!super.zza(zzbez1, i))
//* 20 39:aload_0
//* 21 40:aload_1
//* 22 41:iload_2
//* 23 42:invokespecial #51 <Method boolean zzbfc.zza(zzbez, int)>
//* 24 45:ifne 0
return this;
// 25 48:aload_0
// 26 49:areturn
} else
{
zzapr = Integer.valueOf(zzbez1.zzacc());
// 27 50:aload_0
// 28 51:aload_1
// 29 52:invokevirtual #54 <Method int zzbez.zzacc()>
// 30 55:invokestatic #60 <Method Integer Integer.valueOf(int)>
// 31 58:putfield #26 <Field Integer zzapr>
}
} else
//* 32 61:goto 0
{
zzapf = Integer.valueOf(zzbez1.zzacc());
// 33 64:aload_0
// 34 65:aload_1
// 35 66:invokevirtual #54 <Method int zzbez.zzacc()>
// 36 69:invokestatic #60 <Method Integer Integer.valueOf(int)>
// 37 72:putfield #24 <Field Integer zzapf>
}
} else
//* 38 75:goto 0
{
zzape = Integer.valueOf(zzbez1.zzacc());
// 39 78:aload_0
// 40 79:aload_1
// 41 80:invokevirtual #54 <Method int zzbez.zzacc()>
// 42 83:invokestatic #60 <Method Integer Integer.valueOf(int)>
// 43 86:putfield #22 <Field Integer zzape>
}
} else
//* 44 89:goto 0
{
if(zzapn == null)
//* 45 92:aload_0
//* 46 93:getfield #20 <Field zziw zzapn>
//* 47 96:ifnonnull 110
zzapn = new zziw();
// 48 99:aload_0
// 49 100:new #62 <Class zziw>
// 50 103:dup
// 51 104:invokespecial #63 <Method void zziw()>
// 52 107:putfield #20 <Field zziw zzapn>
zzbez1.zza(((zzbfi) (zzapn)));
// 53 110:aload_1
// 54 111:aload_0
// 55 112:getfield #20 <Field zziw zzapn>
// 56 115:invokevirtual #66 <Method void zzbez.zza(zzbfi)>
}
} else
//* 57 118:goto 0
{
int j = zzbez1.getPosition();
// 58 121:aload_1
// 59 122:invokevirtual #69 <Method int zzbez.getPosition()>
// 60 125:istore_3
try
{
zzanu = Integer.valueOf(zzia.zzd(zzbez1.zzacc()));
// 61 126:aload_0
// 62 127:aload_1
// 63 128:invokevirtual #54 <Method int zzbez.zzacc()>
// 64 131:invokestatic #75 <Method int zzia.zzd(int)>
// 65 134:invokestatic #60 <Method Integer Integer.valueOf(int)>
// 66 137:putfield #18 <Field Integer zzanu>
}
//* 67 140:goto 0
//* 68 143:aload_1
//* 69 144:iload_3
//* 70 145:invokevirtual #79 <Method void zzbez.zzdc(int)>
//* 71 148:aload_0
//* 72 149:aload_1
//* 73 150:iload_2
//* 74 151:invokevirtual #51 <Method boolean zzbfc.zza(zzbez, int)>
//* 75 154:pop
//* 76 155:goto 0
//* 77 158:aload_0
//* 78 159:areturn
// Misplaced declaration of an exception variable
catch(IllegalArgumentException illegalargumentexception)
{
zzbez1.zzdc(j);
((zzbfc)this).zza(zzbez1, i);
}
}
} while(true);
return this;
//* 79 160:astore 4
//* 80 162:goto 143
}
public final zzbfi zza(zzbez zzbez1)
throws IOException
{
return ((zzbfi) (zzv(zzbez1)));
// 0 0:aload_0
// 1 1:aload_1
// 2 2:invokespecial #83 <Method zziy zzv(zzbez)>
// 3 5:areturn
}
// Serializes every non-null field of this message to the given writer,
// tagged with its field number (1-5), then delegates to the superclass to
// emit any remaining data.
// NOTE(review): presumably a protobuf-style wire format — zzm looks like
// "write tagged int" and zza(int, zzbfi) like "write tagged nested
// message"; confirm against zzbfa. Decompiler bytecode listings removed;
// the code statements are unchanged.
public final void zza(zzbfa zzbfa1)
    throws IOException
{
    if(zzanu != null)
        zzbfa1.zzm(1, zzanu.intValue());
    if(zzapn != null)
        zzbfa1.zza(2, ((zzbfi) (zzapn)));
    if(zzape != null)
        zzbfa1.zzm(3, zzape.intValue());
    if(zzapf != null)
        zzbfa1.zzm(4, zzapf.intValue());
    if(zzapr != null)
        zzbfa1.zzm(5, zzapr.intValue());
    super.zza(zzbfa1);
}
// Computes the serialized size in bytes of this message: the superclass
// size plus one tagged entry for every non-null field (tags 1-5, matching
// the write order in zza(zzbfa)).
// NOTE(review): zzq appears to be "size of tagged int" and zzb "size of
// tagged nested message" — confirm against zzbfa. The alternating i/j
// temporaries are a decompiler register artifact and are kept byte-for-byte;
// the decompiler's bytecode listings were removed for readability.
protected final int zzr()
{
    int j = super.zzr();
    int i = j;
    if(zzanu != null)
        i = j + zzbfa.zzq(1, zzanu.intValue());
    j = i;
    if(zzapn != null)
        j = i + zzbfa.zzb(2, ((zzbfi) (zzapn)));
    i = j;
    if(zzape != null)
        i = j + zzbfa.zzq(3, zzape.intValue());
    j = i;
    if(zzapf != null)
        j = i + zzbfa.zzq(4, zzapf.intValue());
    i = j;
    if(zzapr != null)
        i = j + zzbfa.zzq(5, zzapr.intValue());
    return i;
}
// Optional message fields; all nullable and skipped during write/size when
// null. Tag numbers below follow the serialization methods above.
private Integer zzanu; // tag 1
private Integer zzape; // tag 3
private Integer zzapf; // tag 4
private zziw zzapn; // tag 2 (nested message)
private Integer zzapr; // tag 5
}
|
kane-chen/mview | mview.worker/src/main/java/cn/kane/mview/worker/resource/loader/builder/PageBuilder.java | <gh_stars>0
package cn.kane.mview.worker.resource.loader.builder;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import cn.kane.mview.service.definition.entity.DefinitionKey;
import cn.kane.mview.service.definition.entity.PageDefinition;
import cn.kane.mview.service.definition.entity.TemplateDefinition;
import cn.kane.mview.service.definition.service.PageDefinitionManager;
import cn.kane.mview.service.definition.service.TemplateDefinitionManager;
import cn.kane.mview.service.resource.entity.Page;
/**
 * Builds renderable {@link Page} resources from their stored definitions.
 * Every template part (layout, css, js) is resolved through the
 * {@link TemplateDefinitionManager}; missing parts are simply left unset.
 */
public class PageBuilder implements ResourceBuilder<Page> {

    @Autowired
    private PageDefinitionManager pageDefinitionManager;
    @Autowired
    private TemplateDefinitionManager templateDefinitionManager;

    @Override
    public Page build(DefinitionKey definitionKey) {
        // A null key yields no resource.
        return definitionKey == null ? null : buildResourceInstance(definitionKey);
    }

    /** Loads the page definition for the key and assembles the page from it. */
    private Page buildResourceInstance(DefinitionKey definitionKey) {
        if (definitionKey == null) {
            return null;
        }
        return buildPage(pageDefinitionManager.get(definitionKey));
    }

    /** Assembles a {@link Page} from its definition; null-safe on every part. */
    private Page buildPage(PageDefinition definition) {
        if (definition == null) {
            return null;
        }
        Page page = new Page();
        page.setPageDefinition(definition);
        page.setDefinitionKey(definition.getKey());

        // layout
        TemplateDefinition layout = templateDefinitionManager.get(definition.getLayoutDefinition());
        if (layout != null) {
            page.setLayout(layout.getContent());
        }
        // css
        TemplateDefinition css = templateDefinitionManager.get(definition.getCssDefinition());
        if (css != null) {
            page.setCss(css.getContent());
        }
        // js
        TemplateDefinition js = templateDefinitionManager.get(definition.getJsDefinition());
        if (js != null) {
            page.setJs(js.getContent());
        }
        // widgets
        List<DefinitionKey> widgetKeys = definition.getWidgetDefinitions();
        if (widgetKeys != null) {
            page.setWidgetKeys(widgetKeys);
        }
        // data readers — only set when the list is non-empty, as before
        List<DefinitionKey> dataReaderKeys = definition.getDataReaderDefinitions();
        if (dataReaderKeys != null && !dataReaderKeys.isEmpty()) {
            page.setDataReaderKeys(dataReaderKeys);
        }
        return page;
    }
}
|
narendly/VoogaSalad | voogasalad/player/leveldatamanager/LevelData.java | <reponame>narendly/VoogaSalad<gh_stars>1-10
package player.leveldatamanager;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.HashMap;
import authoring.interfaces.Elementable;
import authoring.model.VoogaFrontEndText;
import events.AnimationEvent;
import events.AnimationFactory;
import events.VoogaEvent;
import gameengine.Sprite;
import gameengine.SpriteFactory;
import javafx.scene.Node;
import physics.IPhysicsEngine;
import resources.VoogaBundles;
import tools.Pair;
import tools.PairZAxisComparator;
import tools.VoogaBoolean;
import tools.VoogaJukebox;
import tools.VoogaString;
import tools.interfaces.VoogaData;
/**
 * A centralized class to contain and access data including Sprites, Text,
 * Global Variables, and Events
 *
 * @author <NAME>
 */
public class LevelData implements ILevelData {
    // Name of the global variable that signals a save request (read in getSaveNow()).
    private static final String SAVE_PROGRESS = VoogaBundles.defaultglobalvars.getProperty("SaveProgress");
    private IPhysicsEngine myPhysics;
    // Id (key into myElements) of the player-controlled sprite.
    private String myMainCharID;
    // All on-screen elements (sprites and texts), keyed by element id.
    private Map<String, Elementable> myElements;
    private SpriteFactory mySpriteFactory;
    private AnimationFactory myAnimationFactory;
    // Global game variables, keyed by variable name.
    private Map<String, VoogaData> myGlobalVariables;
    private KeyEventContainer myKeyEventContainer;
    // Names of the timer / next-level global variables (looked up from defaults).
    private String myTimerKey;
    private String myNextLevelKey;
    private LevelTransitioner myTransitioner;
    private ResourceBundle myEventMethods;

    /**
     * Default constructor that takes in an instance of a physics module
     *
     * @param physicsengine
     */
    public LevelData(IPhysicsEngine physicsengine) {
        myEventMethods = VoogaBundles.EventMethods;
        myKeyEventContainer = new KeyEventContainer();
        myPhysics = physicsengine;
        myElements = new HashMap<>();
        myGlobalVariables = new HashMap<>();
        myNextLevelKey = VoogaBundles.defaultglobalvars.getProperty("NextLevelIndex");
        myTimerKey = VoogaBundles.defaultglobalvars.getProperty("Time");
    }

    /**
     * Returns a list of sprite IDs given an archetype
     *
     * @param archetype
     * @return all sprites (not ids) whose archetype matches
     */
    public List<Sprite> getSpritesByArch(String archetype) {
        List<Sprite> list = new ArrayList<>();
        for (String id : myElements.keySet()) {
            // Only Sprites carry an archetype; texts are filtered out here.
            if (myElements.get(id) instanceof Sprite && ((Sprite) myElements.get(id)).getArchetype().equals(archetype)) {
                list.add((Sprite) myElements.get(id));
            }
        }
        return list;
    }

    /**
     * Returns animation created by AnimationFactory
     */
    public AnimationEvent getAnimationFromFactory(String animationString) {
        // A known sequence name yields its first event; otherwise the name is
        // treated as a single animation event. Clones keep factory state pristine.
        if (myAnimationFactory.getMyAnimationSequences().containsKey(animationString)) {
            List<AnimationEvent> clonedSequence = myAnimationFactory.cloneAnimationSequence(animationString);
            return clonedSequence.get(0);
        } else {
            return myAnimationFactory.cloneAnimationEvent(animationString);
        }
    }

    /**
     * Adds a sprite as a member of the given archetype
     *
     * @param archetype
     * @return the newly created sprite, already registered in myElements
     */
    public Sprite addSprite(String archetype) {
        Elementable newSprite = mySpriteFactory.createSprite(archetype);
        myElements.put(newSprite.getId(), newSprite);
        return (Sprite) newSprite;
    }

    /**
     * Returns a Global Variable (VoogaData) as specified by its variable name
     *
     * @param variable
     * @return
     */
    public VoogaData getGlobalVar(String variable) {
        return myGlobalVariables.get(variable);
    }

    /**
     * Returns a text object by ID
     *
     * @param id
     * @return
     */
    public VoogaFrontEndText getText(Object id) {
        return (VoogaFrontEndText) myElements.get(id);
    }

    /**
     * Put all objects into a pair of displayable objects
     *
     * @return nodes paired with their "static" flag, sorted by z-axis
     */
    public List<Pair<Node, Boolean>> getDisplayableNodes() {
        List<Pair<Node, Boolean>> displayablenodes = new ArrayList<>();
        for (Object key : myElements.keySet()) {
            Boolean isStatic = (Boolean) myElements.get(key).getVoogaProperties().get(VoogaBundles.spriteProperties.getString("STATIC")).getProperty().getValue();
            displayablenodes.add(new Pair<Node, Boolean>(myElements.get(key).getNodeObject(), isStatic));
        }
        // Sort by z-axis so rendering order matches depth.
        displayablenodes.sort(new PairZAxisComparator());
        return displayablenodes;
    }

    /**
     * Add a given event and populate the pressed and released KeyCombos
     */
    public void addEventAndPopulateKeyCombos(VoogaEvent event) {
        myKeyEventContainer.addEventAndPopulateKeyCombos(event, myEventMethods);
    }

    /**
     * Refreshes the data and restarts timer in global variable and sets level
     *
     * @param levelfilename
     */
    public void refreshLevelData(String levelfilename) {
        myTransitioner = new LevelTransitioner(levelfilename, myElements, myKeyEventContainer, myGlobalVariables, myNextLevelKey);
        myElements = myTransitioner.populateNewSprites();
        myKeyEventContainer = myTransitioner.populateNewEvents();
        myGlobalVariables = myTransitioner.populateNewGlobals();
        // Swap background music: the BGM variable name is "<level file stem>BGM".
        VoogaJukebox.getInstance().stopBGM();
        VoogaJukebox.getInstance().setBGM((String) myGlobalVariables.get(Paths.get(levelfilename).getFileName().toString().replace(".xml", "")+"BGM").getValue());
        VoogaJukebox.getInstance().playBGM();
        mySpriteFactory = myTransitioner.getNewSpriteFactory();
        myMainCharID = myTransitioner.getMainCharID();
        myAnimationFactory = myTransitioner.getAnimationFactory();
    }

    /**
     * Update the global timer double
     *
     * @param time
     */
    public void updatedGlobalTimer(double time) {
        myGlobalVariables.get(myTimerKey).setValue(new Double(time));
    }

    /**
     * Saves current game progress into a XML file
     */
    public void saveProgress(String filePath) {
        // Clear the save request flag before persisting, so the saved state
        // does not immediately re-trigger a save when reloaded.
        myGlobalVariables.put(SAVE_PROGRESS, new VoogaBoolean(false));
        GameSaver saver = new GameSaver(myElements, myKeyEventContainer, myGlobalVariables, mySpriteFactory,
                myAnimationFactory);
        saver.saveCurrentProgress(filePath);
    }

    /** Name of the level to load next, from the next-level global variable. */
    public String getNextLevelName() {
        return ((String) (((VoogaString) myGlobalVariables.get(myNextLevelKey)).getValue()));
    }

    /** Whether a save has been requested via the SaveProgress global variable. */
    public boolean getSaveNow() {
        return (Boolean) (((VoogaBoolean) myGlobalVariables.get(SAVE_PROGRESS)).getValue());
    }

    /** Sets the next-level global variable to the given level name. */
    public void setNextLevelName(String levelName) {
        myGlobalVariables.put(myNextLevelKey, new VoogaString(levelName));
    }

    public Sprite getSpriteByID(String id) {
        return (Sprite) myElements.get(id);
    }

    public void removeSpriteByID(String id) {
        myElements.remove(id);
    }

    public Boolean containsSprite(String id) {
        return myElements.containsKey(id);
    }

    /** The player-controlled sprite, looked up by the stored main-character id. */
    public Sprite getMainSprite() {
        return getSpriteByID(myMainCharID);
    }

    public IPhysicsEngine getPhysicsEngine() {
        return myPhysics;
    }

    public KeyEventContainer getKeyEventContainer() {
        return myKeyEventContainer;
    }

    public Map<String, VoogaData> getGlobalVariables() {
        return myGlobalVariables;
    }

    @Override
    public Map<String, Elementable> getElements() {
        return myElements;
    }

    public Set<Entry<String, Elementable>> getElementables() {
        return myElements.entrySet();
    }
}
null-kryptonian/ProblemSolving | HackerRank/10 days of Statistics/Day 1 Standard Deviation.cpp | #include <cmath>
#include <cstdio>
#include <vector>
#include <iostream>
#include <algorithm>
using namespace std;
int main() {
    /* Read n integers from STDIN and print their population standard
     * deviation, sqrt(mean((x - mean)^2)), with one decimal place. */
    int n;
    cin >> n;

    // std::vector replaces the original `int arr[n]` variable-length array,
    // which is a compiler extension and not standard C++.
    vector<int> values(n);
    long long sum = 0;  // long long: avoid int overflow on large inputs
    for (int i = 0; i < n; ++i) {
        cin >> values[i];
        sum += values[i];
    }
    double mean = sum / (n * 1.0);

    double squaredDiffs = 0.0;
    for (int i = 0; i < n; ++i) {
        double diff = values[i] - mean;
        squaredDiffs += diff * diff;  // direct multiply instead of pow(x, 2)
    }
    double variance = squaredDiffs / (n * 1.0);
    double stdDeviation = sqrt(variance);
    printf("%.1f", stdDeviation);
    return 0;
}
|
jjzhang166/zzilla_opencvr | include/mining/include/minterfacemgr.hpp | //------------------------------------------------------------------------------
// File: minterfacemgr.hpp
//
// Desc: Interface manager for Data Mining.
//
// Copyright (c) 2014-2018. veyesys.com All rights reserved.
//------------------------------------------------------------------------------
#ifndef __M_INTERFACE_MGR_HPP__
#define __M_INTERFACE_MGR_HPP__
#include "utility.hpp"
#include "debug.hpp"
#include "videotype.hpp"
#include "miningtype.hpp"
#include "minterface.hpp"
#include "factory.hpp"
// Binds one MiningInterface implementation to the Factory's frame-delivery
// machinery. The static handlers are C-style trampolines: pParam carries the
// MiningInterfaceMgr* and the call is forwarded to the matching instance
// handler. All methods are implemented inline in minterfacemgrimpl.hpp.
class MiningInterfaceMgr
{
public:
    // id: interface identifier; pFactory: owning factory; pDevice: mining backend.
    inline MiningInterfaceMgr(u32 id, Factory &pFactory, MiningInterface * pDevice);
    inline ~MiningInterfaceMgr();
public:
    inline BOOL Init();
    inline BOOL Cleanup();
    // Instance handler for raw frames.
    inline void RawHandler1(RawFrame& frame);
    // Static trampoline; pParam is the MiningInterfaceMgr* to forward to.
    inline static void RawHandler(RawFrame& frame, void * pParam);
    // Instance handler for compressed video sequence frames.
    inline void SeqHandler1(VideoSeqFrame& frame);
    // Static trampoline; pParam is the MiningInterfaceMgr* to forward to.
    inline static void SeqHandler(VideoSeqFrame& frame, void * pParam);
    inline int GetId()
    {
        return m_id;
    }
private:
    MiningInterface * m_pDevice;
    u32 m_id;
    Factory &m_pFactory;
    // Requested stream type for this interface.
    MMReqStream m_type;
};
#include "minterfacemgrimpl.hpp"
#endif /* __M_INTERFACE_MGR_HPP__ */
|
/**
 * jQuery plugin: toggle a three-dot "submit spinner" on an element.
 *
 * spinner()            – add the spinner (appended by default)
 * spinner(_, 'left')   – add the spinner before the element's content
 * spinner('remove')    – remove the spinner again
 */
$.fn.spinner = function (type, side) {
    var $target = $(this);

    // Removal path: drop the marker class and the injected markup.
    if ('remove' === type) {
        $target.removeClass('spinning');
        $target.find('.submit-spinner').remove();
        return this;
    }

    var insert = 'left' === side ? 'prepend' : 'append';
    $target.addClass('spinning');
    $target[insert](
        '<div class="submit-spinner">\n' +
        ' <div class="bounce1"></div>\n' +
        ' <div class="bounce2"></div>\n' +
        ' <div class="bounce3"></div>\n' +
        '</div>'
    );

    return this;
};
|
jnthn/intellij-community | python/testData/copyPaste/BeginningOfIndentedLinePrecededByPastedWord.src.py | <reponame>jnthn/intellij-community<filename>python/testData/copyPaste/BeginningOfIndentedLinePrecededByPastedWord.src.py
<selection>CellClass.</selection> |
choi360/42bangkok-libft | test/libft_test/tests/Part1_functions/ft_memchr/main.c | <filename>test/libft_test/tests/Part1_functions/ft_memchr/main.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* main.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: jtoty <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/02/28 12:01:40 by jtoty #+# #+# */
/* Updated: 2017/03/09 15:39:41 by jtoty ### ########.fr */
/* */
/* ************************************************************************** */
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include "libft.h"
#include <stdio.h>
/*
** Writes the NUL-terminated string s to stdout via write(2).
** The length is counted by hand (the harness avoids strlen on purpose).
*/
static void ft_print_result(const char *s)
{
	int	len;

	for (len = 0; s[len] != '\0'; len++)
		;
	write(1, s, len);
}
static void check_memchr(void *s, char c, int n)
{
const char *str;
str = ft_memchr(s, c, n);
if (!str)
ft_print_result("NULL");
else
ft_print_result(str);
}
/*
** Test driver: argv[1] selects which ft_memchr scenario to run.
** A 5-second alarm guards against a hanging implementation.
*/
int main(int argc, const char *argv[])
{
	alarm(5);
	if (argc == 1)
		return (0);
	switch (atoi(argv[1]))
	{
		case 1:
			check_memchr("bonjour", 'b', 4);
			break;
		case 2:
			check_memchr("bonjour", 'o', 7);
			break;
		case 3:
			check_memchr("bonjourno", 'n', 2);
			break;
		case 4:
			check_memchr("bonjour", 'j', 6);
			break;
		case 5:
			check_memchr("bonjour", 's', 7);
			break;
		case 6:
		{
			/* Search for the byte 0xFF inside an int array. */
			int tab[7] = {-49, 49, 1, -1, 0, -2, 2};
			printf("%s", (char *)ft_memchr(tab, -1, 7));
			break;
		}
	}
	return (0);
}
|
umer-rs/runelite | runescape-client/src/main/java/class251.java | import net.runelite.mapping.ObfuscatedName;
@ObfuscatedName("is")
public interface class251 {
}
|
talCrafts/Udhari | app/src/main/java/org/talcrafts/udhari/tx/DatePickerFragment.java | package org.talcrafts.udhari.tx;
import android.app.DatePickerDialog;
import android.app.Dialog;
import android.os.Bundle;
import androidx.fragment.app.DialogFragment;
import java.util.Calendar;
/**
 * Managed wrapper around {@link DatePickerDialog}: defaults to today's date
 * and forbids picking dates in the past. The hosting activity must implement
 * {@link DatePickerDialog.OnDateSetListener}.
 */
public class DatePickerFragment extends DialogFragment {

    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        // Default the picker to today's date.
        final Calendar today = Calendar.getInstance();

        DatePickerDialog dialog = new DatePickerDialog(
                getActivity(),
                (DatePickerDialog.OnDateSetListener) getActivity(),
                today.get(Calendar.YEAR),
                today.get(Calendar.MONTH),
                today.get(Calendar.DAY_OF_MONTH));

        // Earliest selectable moment: today at noon (same calendar reused).
        today.set(Calendar.HOUR_OF_DAY, 12);
        today.set(Calendar.MINUTE, 0);
        today.set(Calendar.SECOND, 0);
        dialog.getDatePicker().setMinDate(today.getTimeInMillis());

        return dialog;
    }
}
|
mantamusica/interfacesDesign | NetBeansProjects/Sockets_Ejercicio1/src/sockets_ejercicio2/Server.java | package sockets_ejercicio2;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;
import sun.security.pkcs11.wrapper.Constants;
/**
 * Single-shot file server: accepts one client on port 1500, reads a file
 * path from it (UTF), then streams that file back line by line, terminated
 * by the sentinel string "Acabe".
 */
public class Server {

    public static void main(String[] args) {
        // try-with-resources closes the sockets and streams on every path;
        // the original only closed them on success and leaked on exception.
        try (ServerSocket puerto = new ServerSocket(1500);
             Socket canalComunicacion = puerto.accept();
             DataInputStream datosEntrada = new DataInputStream(canalComunicacion.getInputStream());
             DataOutputStream datosSalida = new DataOutputStream(canalComunicacion.getOutputStream())) {

            // File requested by the client.
            String ficheroPedido = datosEntrada.readUTF();
            File archivo = new File(ficheroPedido);

            try (BufferedReader leerFichero = new BufferedReader(new FileReader(archivo))) {
                String linea;
                while ((linea = leerFichero.readLine()) != null) {
                    datosSalida.writeUTF(linea);
                }
                // End-of-file sentinel expected by the client.
                datosSalida.writeUTF("Acabe");
            } catch (IOException error) {
                // Same user-facing message as before for file/read failures.
                System.out.println("Hubo Un Error De Escritura O_o");
            }
        } catch (IOException ex) {
            // The original swallowed this silently, hiding bind/accept failures.
            ex.printStackTrace();
        }
    }
}
|
LeovR/rtpmidi | rtp-midi-core/src/main/java/io/github/leovr/rtipmidi/AppleMidiServer.java | package io.github.leovr.rtipmidi;
import io.github.leovr.rtipmidi.session.AppleMidiSession;
import io.github.leovr.rtipmidi.session.SessionChangeListener;
import io.github.leovr.rtipmidi.control.AppleMidiControlServer;
import io.github.leovr.rtipmidi.session.AppleMidiSessionServer;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import javax.annotation.Nonnull;
/**
 * Entry point for RTP-MIDI communication. Owns one {@link AppleMidiControlServer}
 * and one {@link AppleMidiSessionServer}; register an {@link AppleMidiSession}
 * via {@link #addAppleMidiSession(AppleMidiSession)} to receive MIDI messages.
 */
@Slf4j
public class AppleMidiServer implements SessionChangeListener {

    private static final int DEFAULT_PORT = 50004;
    private static final String DEFAULT_NAME = "rtpMIDIJava";

    @Getter
    private final int port;
    private final AppleMidiControlServer controlServer;
    private final AppleMidiSessionServer sessionServer;

    /**
     * Creates a server with the default name and default control port.
     */
    public AppleMidiServer() {
        this(DEFAULT_NAME, DEFAULT_PORT);
    }

    /**
     * Creates a server advertising itself under the given name. The control
     * server listens on {@code port}; the session server always runs on
     * {@code port + 1}.
     *
     * @param name The name under which the other peers should see this server
     * @param port The control port. A session server will be created on the {@code port + 1}
     */
    public AppleMidiServer(@Nonnull final String name, final int port) {
        this.port = port;
        this.controlServer = new AppleMidiControlServer(name, port);
        this.sessionServer = new AppleMidiSessionServer(name, port + 1);
        this.sessionServer.registerSessionChangeListener(this);
        this.controlServer.registerEndSessionListener(this.sessionServer);
    }

    /**
     * Registers a session with this server so it receives MIDI messages.
     *
     * @param session The session to be added
     */
    public void addAppleMidiSession(@Nonnull final AppleMidiSession session) {
        this.sessionServer.addAppleMidiSession(session);
    }

    /**
     * Unregisters a previously added session.
     *
     * @param session The session to be removed
     */
    public void removeAppleMidiSession(@Nonnull final AppleMidiSession session) {
        this.sessionServer.removeAppleMidiSession(session);
    }

    @Override
    public void onMaxNumberOfSessionsChange(final int maxNumberOfSessions) {
        // Keep the control server's advertised capacity in sync with the
        // session server's actual capacity.
        this.controlServer.setMaxNumberOfSessions(maxNumberOfSessions);
    }

    /**
     * Starts the session server, then the control server.
     */
    public void start() {
        this.sessionServer.start();
        this.controlServer.start();
        log.info("AppleMidiServer started");
    }

    /**
     * Stops the session server, then the control server.
     */
    public void stop() {
        this.sessionServer.stopServer();
        this.controlServer.stopServer();
        log.info("AppleMidiServer stopped");
    }
}
|
Quantify-world/react-styleguidist-fix-react-docgen | lib/rsg-components/Name/NameRenderer.js | <reponame>Quantify-world/react-styleguidist-fix-react-docgen
// NOTE(review): this file is Babel-transpiled output (the CommonJS interop
// helpers below are machine-generated). Edit the ES-module source instead
// of this file; comments here only describe the generated structure.
'use strict';

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.NameRenderer = NameRenderer;

var _react = require('react');

var _react2 = _interopRequireDefault(_react);

var _propTypes = require('prop-types');

var _propTypes2 = _interopRequireDefault(_propTypes);

var _Code = require('rsg-components/Code');

var _Code2 = _interopRequireDefault(_Code);

var _Styled = require('rsg-components/Styled');

var _Styled2 = _interopRequireDefault(_Styled);

var _classnames = require('classnames');

var _classnames2 = _interopRequireDefault(_classnames);

// Babel helper: normalizes CJS/ESM default exports.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

// Babel helper: computed-key property assignment for object literals.
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }

// JSS style factory: monospace name styling plus a strike-through variant
// for deprecated names.
var styles = function styles(_ref) {
  var fontFamily = _ref.fontFamily,
      fontSize = _ref.fontSize,
      color = _ref.color;
  return {
    name: {
      fontFamily: fontFamily.monospace,
      fontSize: fontSize.small,
      color: color.name
    },
    isDeprecated: {
      color: color.light,
      textDecoration: 'line-through'
    }
  };
};

// Renders a component/prop name inside a <Code> element; deprecated names
// additionally get the strike-through class.
function NameRenderer(_ref2) {
  var classes = _ref2.classes,
      name = _ref2.name,
      deprecated = _ref2.deprecated;

  var classNames = (0, _classnames2.default)(classes.name, _defineProperty({}, classes.isDeprecated, deprecated));
  return _react2.default.createElement(
    _Code2.default,
    { className: classNames },
    name
  );
}

NameRenderer.propTypes = {
  classes: _propTypes2.default.object.isRequired,
  name: _propTypes2.default.string.isRequired,
  deprecated: _propTypes2.default.bool
};

exports.default = (0, _Styled2.default)(styles)(NameRenderer);
GuillaumeLahi/Bauhaus | packages/utilities/src/components/editor-html/editor-html.spec.js | import React from 'react';
import { render } from '@testing-library/react';
import EditorHTML from '.';
describe('editor-html', () => {
it('renders without crashing', () => {
const onChange = () => '';
render(<EditorHTML text="text" handleChange={onChange} smart={true} />);
});
});
|
module ArticleJSON
  module Import
    module GoogleDoc
      module HTML
        # Turns an image node (plus its optional caption node) from a Google
        # Doc HTML export into an ArticleJSON image element.
        class ImageParser
          include Shared::Caption
          include Shared::Float

          # @param [Nokogiri::HTML::Node] node
          # @param [Nokogiri::HTML::Node] caption_node
          # @param [ArticleJSON::Import::GoogleDoc::HTML::CSSAnalyzer] css_analyzer
          def initialize(node:, caption_node:, css_analyzer:)
            @node = node
            @caption_node = caption_node
            @css_analyzer = css_analyzer
            # The main node carries the floating information
            @float_node = @node
          end

          # URL the image points at (its `src` attribute)
          # @return [String]
          def source_url
            image_node.attribute('src').value
          end

          # The first `<img>` tag inside the parsed node
          # @return [Nokogiri::HTML::Node]
          def image_node
            @node.xpath('.//img').first
          end

          # Floating behavior (left, right or none) — only applies to images
          # that are small enough to float
          # @return [Symbol]
          def float
            super if floatable_size?
          end

          # @return [ArticleJSON::Elements::Image]
          def element
            ArticleJSON::Elements::Image
              .new(source_url: source_url, float: float, caption: caption)
          end

          private

          # Whether the image is narrow enough to float: width known and
          # below 500px (about 3/4 of the google document width)
          # @return [Boolean]
          def floatable_size?
            !image_width.nil? && image_width < 500
          end

          # Width of the image in pixels, read from the `width` attribute or
          # the inline `style`; `nil` when neither specifies it
          # @return [Integer]
          def image_width
            @image_width ||=
              if image_node.has_attribute?('width')
                image_node.attribute('width').value.to_i
              elsif image_node.has_attribute?('style')
                style_match = image_node.attribute('style').value
                                        .match(/width:\s?(?<px>\d+|(\d+?\.\d+))px/)
                style_match['px'].to_i if style_match && style_match['px']
              end
          end
        end
      end
    end
  end
end
|
marc-christian-schulze/aws-sdk-java-v2 | core/metrics-spi/src/main/java/software/amazon/awssdk/metrics/NoOpMetricCollector.java | <reponame>marc-christian-schulze/aws-sdk-java-v2
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.awssdk.metrics;
import software.amazon.awssdk.annotations.SdkPreviewApi;
import software.amazon.awssdk.annotations.SdkPublicApi;
/**
 * A {@link MetricCollector} that discards everything reported to it.
 *
 * <b>NOTE:</b> This is a Preview API and is subject to change so it should not be used in production.
 */
@SdkPreviewApi
@SdkPublicApi
public final class NoOpMetricCollector implements MetricCollector {
    private static final NoOpMetricCollector INSTANCE = new NoOpMetricCollector();

    private NoOpMetricCollector() {
        // Singleton; obtain via create().
    }

    /**
     * @return the shared no-op instance
     */
    public static NoOpMetricCollector create() {
        return INSTANCE;
    }

    @Override
    public String name() {
        return "NoOp";
    }

    @Override
    public <T> void reportMetric(SdkMetric<T> metric, T data) {
        // Intentionally does nothing.
    }

    @Override
    public MetricCollector createChild(String name) {
        throw new UnsupportedOperationException("No op collector does not support createChild");
    }

    @Override
    public MetricCollection collect() {
        throw new UnsupportedOperationException("No op collector does not support collect");
    }
}
|
z415073783/MNN | source/backend/cpu/CPUCosineSimilarity.cpp | //
// CPUCosineSimilarity.cpp
// MNN
//
// Created by MNN on 2019/07/17.
// Copyright © 2018, Alibaba Group Holding Limited
//
#include "CPUCosineSimilarity.hpp"
#include <math.h>
#include "CPUBackend.hpp"
#include "Macro.h"
#include "Vec4.hpp"
namespace MNN {
ErrorCode CPUCosineSimilarity::onExecute(const std::vector<Tensor*>& inputs, const std::vector<Tensor*>& outputs) {
    // Computes, for each spatial position, the cosine similarity between the
    // two inputs' channel vectors: <x1,x2> / sqrt(|x1|^2 * |x2|^2 + eps).
    auto x1     = inputs[0];
    auto x2     = inputs[1];
    auto output = outputs[0];

    const int batch         = x1->batch();
    const int batchStride   = x1->stride(0);
    const int channel       = x1->channel();
    const int channleStride = x1->stride(1);
    const float eps         = 1e-8f; // guards against division by zero for all-zero vectors

    const auto x1DataPtr = x1->host<float>();
    const auto x2DataPtr = x2->host<float>();
    auto outputDataPtr   = output->host<float>();

    // the layout of input tensor is nchw
    for (int i = 0; i < batch; ++i) {
        const auto x1DataBatchPtr = x1DataPtr + i * batchStride;
        const auto x2DataBatchPtr = x2DataPtr + i * batchStride;
        auto outputDataBathPtr    = outputDataPtr + i * channleStride;
        int j = 0;
        // Vectorized path: 4 spatial positions per iteration.
        // BUGFIX: the original condition was `j < channleStride`, which made
        // the last Vec4 iteration read and write up to 3 elements past the
        // end whenever channleStride is not a multiple of 4, and left the
        // scalar tail loop below unreachable.
        for (; j + 4 <= channleStride; j += 4) {
            const auto x1ChannelPtr = x1DataBatchPtr + j;
            const auto x2ChannelPtr = x2DataBatchPtr + j;
            Math::Vec4 innerProduct(.0f);
            Math::Vec4 x1Square(.0f);
            Math::Vec4 x2Square(.0f);
            for (int c = 0; c < channel; ++c) {
                Math::Vec4 x1Data = Math::Vec4::load(x1ChannelPtr + c * channleStride);
                Math::Vec4 x2Data = Math::Vec4::load(x2ChannelPtr + c * channleStride);
                auto x1Xx2        = x1Data * x2Data;
                innerProduct      = innerProduct + x1Xx2;
                x1Square          = x1Square + x1Data * x1Data;
                x2Square          = x2Square + x2Data * x2Data;
            }
            for (int k = 0; k < 4; ++k) {
                outputDataBathPtr[j + k] = innerProduct[k] / sqrt(x1Square[k] * x2Square[k] + eps);
            }
        }
        // Scalar tail for the remaining (channleStride % 4) positions.
        for (; j < channleStride; ++j) {
            const auto x1ChannelPtr = x1DataBatchPtr + j;
            const auto x2ChannelPtr = x2DataBatchPtr + j;
            float innerProduct      = .0f;
            float x1Square          = .0f;
            float x2Square          = .0f;
            for (int c = 0; c < channel; ++c) {
                float x1Data = x1ChannelPtr[c * channleStride];
                float x2Data = x2ChannelPtr[c * channleStride];
                innerProduct += x1Data * x2Data;
                x1Square += x1Data * x1Data;
                x2Square += x2Data * x2Data;
            }
            outputDataBathPtr[j] = innerProduct / sqrt(x1Square * x2Square + eps);
        }
    }
    return NO_ERROR;
}
// Factory hook: builds the CPU execution for CosineSimilarity and registers
// it with the CPU backend for OpType_CosineSimilarity.
class CPUCosineSimilarityCreator : public CPUBackend::Creator {
public:
    virtual Execution* onCreate(const std::vector<Tensor*>& inputs, const std::vector<Tensor*>& outputs,
                                const MNN::Op* op, Backend* backend) const {
        return new CPUCosineSimilarity(backend, op);
    }
};

REGISTER_CPU_OP_CREATOR(CPUCosineSimilarityCreator, OpType_CosineSimilarity);
} // namespace MNN
|
MagicSchooliOS/WCRLiveCorePod | Frameworks/WCRLiveCore.framework/Headers/WCRError.h | <filename>Frameworks/WCRLiveCore.framework/Headers/WCRError.h
//
// WCRError.h
// WCRLiveCore
//
// Created by wenssh on 2018/8/8.
// Copyright © 2018年 com.100tal. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
 * Error type used throughout WCRLiveCore. Currently a plain NSError subclass;
 * the redeclared constructors mirror NSError's designated initializer and
 * convenience factory so call sites get the WCRError return type.
 */
@interface WCRError : NSError

- (instancetype)initWithDomain:(NSErrorDomain)domain code:(NSInteger)code userInfo:(nullable NSDictionary<NSErrorUserInfoKey,id> *)dict;

+ (instancetype)errorWithDomain:(NSErrorDomain)domain code:(NSInteger)code userInfo:(nullable NSDictionary<NSErrorUserInfoKey,id> *)dict;

@end
NS_ASSUME_NONNULL_END
|
panos/haikudepotserver | haikudepotserver-webapp/src/main/java/org/haiku/haikudepotserver/multipage/package-info.java | /**
* <p>This package is concerned with the presentation of a simplified user interface that is driven by vanilla
* web pages as opposed to the "single page" approach taken in the main user interface for the application.</p>
*/
package org.haiku.haikudepotserver.multipage; |
lokijuhy/renku-python | tests/core/management/test_template.py | <reponame>lokijuhy/renku-python
# -*- coding: utf-8 -*-
#
# Copyright 2019-2021 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Template tests."""
import pytest
from renku.core import errors
from renku.core.management.template.template import (
FileAction,
TemplateAction,
copy_template_to_client,
fetch_templates_source,
get_file_actions,
)
from renku.core.management.template.usecase import check_for_template_update, update_template
from renku.core.models.template import TEMPLATE_MANIFEST
TEMPLATES_URL = "https://github.com/SwissDataScienceCenter/renku-project-template"
@pytest.mark.integration
@pytest.mark.parametrize("reference", [None, "master", "0.3.0"])
def test_template_fetch_from_git(reference):
    """Fetching templates from a git repository works for several references."""
    source = fetch_templates_source(source=TEMPLATES_URL, reference=reference)
    assert source.reference == reference
    # The manifest must exist and every template must have been materialized.
    assert (source.path / TEMPLATE_MANIFEST).exists()
    assert all(template.path.exists() for template in source.templates)
def test_template_fetch_invalid_git_url():
    """Fetching a template from a malformed git URL raises ``InvalidTemplateError``."""
    with pytest.raises(errors.InvalidTemplateError):
        fetch_templates_source(source="invalid-url", reference=None)
@pytest.mark.integration
@pytest.mark.vcr
def test_template_fetch_invalid_git_reference():
    """Fetching a template from a non-existent git reference raises ``InvalidTemplateError``."""
    with pytest.raises(errors.InvalidTemplateError):
        fetch_templates_source(source=TEMPLATES_URL, reference="invalid-ref")
def test_check_for_template_update(client_with_template, templates_source, client_database_injection_manager):
    """Checking for updates detects a newer template version."""
    # Publish a newer version of the template the client was created from.
    templates_source.update(id="dummy", version="2.0.0")
    with client_database_injection_manager(client_with_template):
        available, _, old_version, fresh_version = check_for_template_update(client_with_template)
    assert available is True
    assert old_version == "1.0.0"
    assert fresh_version == "2.0.0"
def test_template_update_files(client_with_template, templates_source, client_database_injection_manager):
    """Updating the template rewrites every template file."""
    templates_source.update(id="dummy", version="2.0.0")
    # Snapshot the current content so the update can be detected per file.
    original_contents = {path: path.read_text() for path in client_with_template.template_files}
    with client_database_injection_manager(client_with_template):
        update_template(force=False, interactive=False, dry_run=False)
    for path in client_with_template.template_files:
        assert path.read_text() != original_contents[path]
@pytest.mark.parametrize(
    "action, content_type",
    [
        (FileAction.APPEND, "append"),
        (FileAction.CREATE, "template"),
        (FileAction.OVERWRITE, "template"),
        (FileAction.RECREATE, "template"),
        (FileAction.DELETED, "project"),
        (FileAction.IGNORE_IDENTICAL, "project"),
        (FileAction.IGNORE_UNCHANGED_REMOTE, "project"),
        (FileAction.KEEP, "project"),
    ],
)
def test_copy_template_actions(client, rendered_template, action, content_type, client_database_injection_manager):
    """Test FileActions when copying a template.

    ``content_type`` names where the Dockerfile's content must come from after
    the copy: the project, the template, or both appended together.
    """
    project_content = (client.path / "Dockerfile").read_text()
    template_content = (rendered_template.path / "Dockerfile").read_text()
    # NOTE: Ignore all other files except the Dockerfile
    actions = {f: FileAction.IGNORE_UNCHANGED_REMOTE for f in rendered_template.get_files()}
    actions["Dockerfile"] = action
    with client_database_injection_manager(client):
        copy_template_to_client(
            rendered_template=rendered_template, client=client, project=client.project, actions=actions
        )
    # NOTE: Make sure that files have some content
    assert project_content
    assert template_content
    assert project_content != template_content
    if content_type == "append":
        expected_content = f"{project_content}\n{template_content}"
    elif content_type == "template":
        expected_content = template_content
    else:
        expected_content = project_content
    assert expected_content == (client.path / "Dockerfile").read_text()
def test_get_file_actions_for_initialize(client, rendered_template, client_database_injection_manager):
    """Initialization picks the right action for each kind of file."""
    with client_database_injection_manager(client):
        actions = get_file_actions(
            rendered_template=rendered_template,
            template_action=TemplateAction.INITIALIZE,
            client=client,
            interactive=False,
        )
    # Template file -> action expected when initializing a project.
    expected = {
        ".gitignore": FileAction.APPEND,
        ".dummy": FileAction.CREATE,
        "Dockerfile": FileAction.OVERWRITE,
        "README.md": FileAction.KEEP,
    }
    for filename, expected_action in expected.items():
        assert actions[filename] == expected_action
def test_get_file_actions_for_set(client, rendered_template, client_database_injection_manager):
    """Setting a template picks the right action for each kind of file."""
    with client_database_injection_manager(client):
        actions = get_file_actions(
            rendered_template=rendered_template, template_action=TemplateAction.SET, client=client, interactive=False
        )
    # Template file -> action expected when setting a template.
    expected = {
        ".dummy": FileAction.CREATE,
        "Dockerfile": FileAction.OVERWRITE,
        "README.md": FileAction.KEEP,
    }
    for filename, expected_action in expected.items():
        assert actions[filename] == expected_action
def test_get_file_actions_for_update(
    client_with_template, rendered_template_with_update, client_database_injection_manager
):
    """Updating a template picks the right action for each kind of file."""
    with client_database_injection_manager(client_with_template):
        actions = get_file_actions(
            rendered_template=rendered_template_with_update,
            template_action=TemplateAction.UPDATE,
            client=client_with_template,
            interactive=False,
        )
    # An unchanged file is skipped; a remotely-modified one is overwritten.
    assert actions[".dummy"] == FileAction.IGNORE_IDENTICAL
    assert actions["Dockerfile"] == FileAction.OVERWRITE
def test_update_with_locally_modified_file(
    client_with_template, rendered_template_with_update, client_database_injection_manager
):
    """A file with local modifications is kept even though the template changed."""
    dockerfile = client_with_template.path / "Dockerfile"
    dockerfile.write_text("Local modification")
    with client_database_injection_manager(client_with_template):
        actions = get_file_actions(
            rendered_template=rendered_template_with_update,
            template_action=TemplateAction.UPDATE,
            client=client_with_template,
            interactive=False,
        )
    assert actions["Dockerfile"] == FileAction.KEEP
def test_update_with_locally_deleted_file(
    client_with_template, rendered_template_with_update, client_database_injection_manager
):
    """A locally deleted file is not re-created by a template update."""
    dockerfile = client_with_template.path / "Dockerfile"
    dockerfile.unlink()
    with client_database_injection_manager(client_with_template):
        actions = get_file_actions(
            rendered_template=rendered_template_with_update,
            template_action=TemplateAction.UPDATE,
            client=client_with_template,
            interactive=False,
        )
    assert actions["Dockerfile"] == FileAction.DELETED
@pytest.mark.parametrize("delete", [False, True])
def test_update_with_locally_changed_immutable_file(
    client_with_template, rendered_template_with_update, client_database_injection_manager, delete
):
    """Locally modifying or deleting an immutable template file aborts the update."""
    # NOTE: original docstring was copy-pasted from the deleted-file test above.
    if delete:
        (client_with_template.path / "immutable.file").unlink()
    else:
        (client_with_template.path / "immutable.file").write_text("Locally modified immutable files")
    with pytest.raises(
        errors.TemplateUpdateError, match="Can't update template as immutable template file .* has local changes."
    ), client_database_injection_manager(client_with_template):
        get_file_actions(
            rendered_template=rendered_template_with_update,
            template_action=TemplateAction.UPDATE,
            client=client_with_template,
            interactive=False,
        )
|
harveywangdao/earth | elegant/test/hera/main_cookie.go | <reponame>harveywangdao/earth
package main
import (
"io"
"log"
"net/http"
"strings"
)
// main registers the two cookie demo handlers and serves HTTP on :8090.
func main() {
	http.HandleFunc("/", Cookie)
	http.HandleFunc("/2", Cookie2)
	err := http.ListenAndServe(":8090", nil)
	if err != nil {
		log.Fatal(err)
	}
}
// Cookie sets a "MyCookie" cookie on the response via http.SetCookie and then
// echoes the value of the REQUEST's "MyCookie" cookie into the body.
// NOTE(review): the cookie written here is only visible to the client on its
// NEXT request, so the first hit writes the r.Cookie error message instead.
// NOTE(review): the value contains a space, which is not a valid cookie-value
// character; presumably Cookie2 below exists to work around that — verify.
func Cookie(w http.ResponseWriter, r *http.Request) {
	log.Print("111")
	ck := &http.Cookie{
		Name:  "MyCookie",
		Value: "hhell osss",
		Path:  "/",
		//Domain: "localhost",
		MaxAge: 120,
	}
	http.SetCookie(w, ck)
	ck2, err := r.Cookie("MyCookie")
	if err != nil {
		io.WriteString(w, err.Error())
		return
	}
	log.Print("222")
	io.WriteString(w, ck2.Value)
}
// Cookie2 is a variant of Cookie that writes the Set-Cookie header manually,
// replacing every space in the serialized cookie with "xx" so the header
// contains no invalid value characters. It then echoes the request's
// "MyCookie" value, exactly like Cookie.
func Cookie2(w http.ResponseWriter, r *http.Request) {
	log.Print("111")
	ck := &http.Cookie{
		Name:  "MyCookie",
		Value: "hhelld fffo",
		Path:  "/",
		//Domain: "localhost",
		MaxAge: 120,
	}
	//http.SetCookie(w, ck)
	w.Header().Set("Set-Cookie", strings.Replace(ck.String(), " ", "xx", -1))
	ck2, err := r.Cookie("MyCookie")
	if err != nil {
		io.WriteString(w, err.Error())
		return
	}
	log.Print("222")
	io.WriteString(w, ck2.Value)
}
|
fangjinuo/langx | langx-java/src/main/java/com/jn/langx/pipeline/HeadHandlerContext.java | <filename>langx-java/src/main/java/com/jn/langx/pipeline/HeadHandlerContext.java
package com.jn.langx.pipeline;
/**
 * The head (first) context of a pipeline's handler chain.
 * <p>
 * By default it wraps the shared no-op handler, and it reports that it has no
 * predecessor, terminating backward traversal of the chain.
 */
public class HeadHandlerContext extends HandlerContext {
    /** Creates a head context backed by the shared no-op handler. */
    public HeadHandlerContext() {
        super(NoopHandler.getInstance());
    }
    /** Creates a head context backed by the given handler. */
    public HeadHandlerContext(Handler handler) {
        super(handler);
    }
    /** The head never has a previous context. */
    @Override
    public boolean hasPrev() {
        return false;
    }
}
|
rgiduthuri/NNEF-Tools | nnef_tools/io/nnef/nnef_io.py | # Copyright (c) 2017 The Khronos Group Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division, print_function, absolute_import
import os
import re
import shutil
import sys
import tempfile
import typing
from collections import OrderedDict
import nnef
import numpy as np
import six
from nnef_tools.core import utils
from nnef_tools.io.nnef import nnef_unifier
from nnef_tools.io.nnef.nnef_graph import *
from nnef_tools.io.nnef.parser_config import NNEFParserConfig
# Maps numpy dtype names onto the three NNEF type categories.
NNEFDTypeByNumpyDType = {
    'float16': 'scalar',
    'float32': 'scalar',
    'float64': 'scalar',
    'int8': 'integer',
    'uint8': 'integer',
    'int16': 'integer',
    'uint16': 'integer',
    'int32': 'integer',
    'uint32': 'integer',
    'int64': 'integer',
    'uint64': 'integer',
    'bool': 'logical',
}
# Canonical numpy dtype used when materializing each NNEF type category.
NumpyDTypeByNNEFDType = {
    'scalar': 'float32',
    'integer': 'int32',
    'logical': 'bool',
}
# Accepted forms of read()'s input_shape argument: a single shape, a tuple of
# shapes (matched to inputs by position) or a dict keyed by input name.
_InputShapeType = typing.Union[None,
                               typing.List[int],
                               typing.Tuple[typing.List[int]],
                               typing.Dict[str, typing.List[int]]]
def read(path,  # type: str
         parser_configs=None,  # type: typing.Optional[typing.List[NNEFParserConfig]]
         input_shape=None,  # type: _InputShapeType
         infer_shapes=True,
         ):
    # type: (...)->NNEFGraph
    """Read an NNEF model into an NNEFGraph.

    path: a .tgz archive, a bare .nnef/.txt graph file, or a model directory.
    parser_configs: extra parser configurations (custom fragments, shape
        propagation); combined into a single config before parsing.
    input_shape: overrides the shapes of 'external' (input) tensors; a single
        shape, a tuple of shapes (by input position) or a dict by input name.
    infer_shapes: when True, run shape inference after parsing.
    """
    if not (path.endswith('.tgz') or path.endswith('.nnef') or path.endswith('.txt') or os.path.isdir(path)):
        raise utils.NNEFToolsException("Only .tgz or .nnef or .txt files or directories are supported")
    parser_config = NNEFParserConfig.combine_configs(parser_configs if parser_configs else [])
    path_to_load = None
    compressed = False
    try:
        if os.path.isdir(path):
            compressed = False
            with_weights = True
            path_to_load = path
        elif path.endswith('.tgz'):
            # Archives are extracted into a temp dir removed in 'finally'.
            compressed = True
            with_weights = True
            path_to_load = tempfile.mkdtemp(prefix="nnef_")
            utils.tgz_extract(path, path_to_load)
        elif path.endswith('.nnef') or path.endswith('.txt'):
            # A bare graph file carries no .dat files, so weights can't load.
            compressed = False
            with_weights = False
            path_to_load = path
        else:
            assert False
        # If there are fragments in the graph and also in parser_config
        # we remove the non-standard fragments from parser_config to avoid duplicate fragment definition
        if parser_config.fragments:
            re_graph = re.compile(r"^graph\s|\sgraph\s")
            re_fragment = re.compile(r"^fragment\s|\sfragment\s")
            graph_nnef_path = os.path.join(path_to_load, 'graph.nnef') if os.path.isdir(path_to_load) else path_to_load
            with open(graph_nnef_path, 'r') as f:
                # Scan only up to the 'graph' keyword: fragments precede it.
                while True:
                    line = f.readline()
                    if not line:
                        break
                    if re_fragment.search(line):
                        parser_config.fragments = NNEFParserConfig.STANDARD_CONFIG.fragments
                        break
                    if re_graph.search(line):
                        break
        parser_graph = parser_config.load_graph(path_to_load)
        if input_shape is not None:
            if not isinstance(input_shape, (tuple, list, dict)):
                raise utils.NNEFToolsException("input_shape must be list or dict")
            for op in parser_graph.operations:
                if op.name == 'external':
                    name = op.outputs['output']
                    if isinstance(input_shape, dict):
                        # Dict: override only the inputs named in the dict.
                        if name in input_shape:
                            op.attribs['shape'] = input_shape[name]
                    elif isinstance(input_shape, tuple):
                        # Tuple: one shape per graph input, matched by position.
                        op.attribs['shape'] = input_shape[parser_graph.inputs.index(name)]
                    else:
                        # Single list: the same shape for every input.
                        op.attribs['shape'] = input_shape
        if infer_shapes:
            parser_config.infer_shapes(parser_graph)
        return _read(parser_graph=parser_graph, with_weights=with_weights)
    finally:
        if compressed and path_to_load:
            shutil.rmtree(path_to_load)
def write(nnef_graph,  # type: NNEFGraph
          tgz_or_dir_path,  # type: str
          write_weights=True,  # type: bool
          raise_on_missing_weight=True,  # type: bool
          extensions=None,  # type: typing.Optional[typing.List[str]]
          fragments=None,  # type: typing.Optional[str]
          only_print_used_fragments=False,  # type: bool
          compression_level=0,  # type: int
          ):
    # type: (...) -> None
    """Write an NNEFGraph as a .tgz archive or a model directory.

    Emits graph.nnef, graph.quant (only when some tensor is quantized) and,
    when write_weights is True, one .dat file per variable tensor.
    """
    compressed = tgz_or_dir_path.endswith('.tgz')
    dir_path = None
    try:
        if compressed:
            # Write into a temp dir first, then compress it to the target.
            dir_path = tempfile.mkdtemp(prefix="nnef_")
        else:
            dir_path = tgz_or_dir_path
            if not os.path.exists(dir_path):
                os.makedirs(dir_path)
        with open(os.path.join(dir_path, "graph.nnef"), "w") as f:
            _print(nnef_graph,
                   file_handle=f,
                   extensions=extensions,
                   fragments=fragments,
                   only_print_used_fragments=only_print_used_fragments)
        if any(t.quantization is not None for t in nnef_graph.tensors):
            with open(os.path.join(dir_path, "graph.quant"), "w") as f:
                _print_quantization(nnef_graph, file_handle=f)
        if write_weights:
            _write_weights(nnef_graph, dir_path=dir_path, raise_on_missing_weight=raise_on_missing_weight)
        if compressed:
            utils.tgz_compress(dir_path, tgz_or_dir_path, compression_level=compression_level)
    finally:
        # Only the temporary directory used for compression is removed.
        if compressed and dir_path:
            shutil.rmtree(dir_path)
def _debug_print(nnef_graph, file_handle=None):
    # type: (NNEFGraph, typing.Optional[typing.TextIO]) -> None
    """Print the graph in NNEF text syntax for debugging (stderr by default)."""
    _print(nnef_graph, file_handle=sys.stderr if file_handle is None else file_handle)
def _read(parser_graph, with_weights=True):
    # type: (typing.Any, bool)->NNEFGraph
    """Convert a parsed (nnef module) graph into an NNEFGraph.

    When with_weights is False, variables receive an empty placeholder array
    of the proper dtype instead of their data.
    """
    tensor_by_name = {}
    g = NNEFGraph(name=parser_graph.name)
    def add_to_tensor_by_name(tensor):
        assert tensor.name not in tensor_by_name, "Tensor {} defined multiple times".format(tensor.name)
        tensor_by_name[tensor.name] = tensor
    def transform_input(input_):
        # Identifiers reference previously-defined results; literal values
        # become anonymous rank-0 constant tensors.
        if isinstance(input_, nnef.Identifier):
            assert str(input_) in tensor_by_name, "Tensor {} not defined before use".format(str(input_))
            return tensor_by_name[str(input_)]
        else:
            return NNEFTensor(graph=g,
                              name=None,
                              shape=[],
                              dtype=NNEFDTypeByNumpyDType[np.array(input_).dtype.name],
                              data=[input_])
    def transform_result(result_):
        # Each result identifier defines a new tensor (carrying optional
        # quantization info); non-identifier results pass through unchanged.
        if isinstance(result_, nnef.Identifier):
            quantization = parser_graph.tensors[str(result_)].quantization
            if quantization:
                # 'op-name' selects the quantization algorithm; the remaining
                # entries are its attributes.
                quantization = NNEFQuantization(name=quantization['op-name'], attribs=quantization)
                del quantization.attribs['op-name']
            else:
                quantization = None
            shape = parser_graph.tensors[str(result_)].shape
            tensor = NNEFTensor(graph=g,
                                name=str(result_),
                                shape=list(shape) if shape is not None else None,
                                dtype=parser_graph.tensors[str(result_)].dtype,
                                quantization=quantization)
            add_to_tensor_by_name(tensor)
            return tensor
        else:
            return result_
    for parser_op in parser_graph.operations:
        inputs = utils.recursive_transform(parser_op.inputs, transform_input)
        # List-valued (variadic) inputs stay a list; otherwise use a tuple.
        if any(isinstance(i, list) for i in six.itervalues(inputs)):
            inputs = utils.recursive_collect(inputs)
        else:
            inputs = tuple(utils.recursive_collect(inputs))
        outputs = utils.recursive_transform(parser_op.outputs, transform_result)
        if any(isinstance(o, list) for o in six.itervalues(outputs)):
            outputs = utils.recursive_collect(outputs)
        else:
            outputs = tuple(utils.recursive_collect(outputs))
        if parser_op.name == "variable":
            outputs[0].label = parser_op.attribs["label"]
            if with_weights:
                outputs[0].data = parser_graph.tensors[parser_op.outputs["output"]].data
                assert outputs[0].data is not None
            else:
                # Empty placeholder: data.size == 0 marks "weights not loaded".
                outputs[0].data = np.array([], dtype=NumpyDTypeByNNEFDType[parser_op.dtype])
        if parser_op.name == "constant":
            outputs[0].data = parser_op.attribs["value"]
        # Source ops are implicit in NNEFGraph; they are regenerated on print.
        if parser_op.name not in ["external", "constant", "variable"]:
            NNEFOperation(graph=g, name=parser_op.name, attribs=dict(parser_op.attribs), inputs=inputs, outputs=outputs,
                          dtype=parser_op.dtype)
    input_tensors = []
    for input_ in parser_graph.inputs:
        assert str(input_) in tensor_by_name, "Input tensor {} was not declared".format(str(input_))
        input_tensors.append(tensor_by_name[str(input_)])
    output_tensors = []
    for output_ in parser_graph.outputs:
        assert str(output_) in tensor_by_name, "Output tensor {} was not declared".format(str(output_))
        output_tensors.append(tensor_by_name[str(output_)])
    g.inputs = OrderedDict((t.name, t) for t in input_tensors)
    g.outputs = OrderedDict((t.name, t) for t in output_tensors)
    g.generate_missing_names()
    return g
def get_used_fragments(nnef_graph, fragments):
    # type: (NNEFGraph, str)->str
    """Return only the fragment definitions that the graph actually uses.

    An operation name or a tensor-quantization name counts as a use.
    Returns "" when no fragment is referenced.
    """
    used_names = {op.name for op in nnef_graph.operations}
    used_names.update(t.quantization.name for t in nnef_graph.tensors if t.quantization)
    kept = []
    for definition in (piece.strip() for piece in re.split(r"^fragment\s|\sfragment\s", fragments)):
        if not definition:
            continue
        # The fragment name precedes the '(' argument list / '<' generic part.
        name = definition.split('(')[0].split('<')[0].strip()
        if name in used_names:
            kept.append(definition)
    if not kept:
        return ""
    return 'fragment ' + '\nfragment '.join(kept)
def _print(nnef_graph,  # type: NNEFGraph
           file_handle,  # type: typing.TextIO
           extensions=None,  # type: typing.Optional[typing.List[str]]
           fragments=None,  # type: typing.Optional[str]
           only_print_used_fragments=False,  # type: bool
           ):
    # type: (...)->None
    """Write the graph in NNEF text syntax to file_handle.

    Temporarily adds explicit source ops (constant/variable/external) so every
    tensor has a printable producer; they are removed again in 'finally'.
    """
    generate_source_operations(nnef_graph)
    nnef_graph.sort()
    try:
        if extensions is None:
            extensions = []
        if fragments is None:
            fragments = ""
        if nnef_graph.fragments is not None:
            if len(fragments) != 0:
                fragments += '\n'
            fragments += nnef_graph.fragments
        fragments = add_tflite_quantization_fragment_if_needed(nnef_graph, fragments)
        if only_print_used_fragments:
            fragments = get_used_fragments(nnef_graph, fragments)
        if fragments:
            # Fragment definitions require these KHR extensions to be declared.
            if "KHR_enable_fragment_definitions" not in extensions:
                extensions.append("KHR_enable_fragment_definitions")
            if "KHR_enable_operator_expressions" not in extensions:
                extensions.append("KHR_enable_operator_expressions")
        f = file_handle
        indent = 4 * " "
        print(nnef.format_version((1, 0)), file=f)
        if extensions:
            print(nnef.format_extensions(extensions), file=f)
        if fragments:
            print(file=f)
            print(fragments, file=f)
        print(file=f)
        graph_name = _recursive_check_str(nnef_graph.name) if nnef_graph.name is not None else "network"
        graph_inputs = _recursive_check_str([input_.name for input_ in nnef_graph.inputs])
        graph_outputs = _recursive_check_str([output_.name for output_ in nnef_graph.outputs])
        print("graph {}({}) -> ({})".format(graph_name, ', '.join(graph_inputs), ', '.join(graph_outputs)), file=f)
        print("{", file=f)
        for op in nnef_graph.operations:
            # Rank-0 constants are inlined; other inputs become identifiers.
            inputs = _transform_inputs_before_print(list(op.inputs) if not isinstance(op.inputs, tuple) else op.inputs)
            invocation = nnef.format_invocation(
                name=_recursive_check_str(op.name),
                attribs=_recursive_check_str(_sorted_ordered_dict(op.attribs)),
                inputs=_recursive_check_str([inputs] if isinstance(inputs, list) else list(inputs)),
                outputs=_recursive_check_str(
                    _result_to_identifiers(list(op.outputs) if not isinstance(op.outputs, tuple) else op.outputs)),
                dtype=_recursive_check_str(op.dtype))
            comment = " # {}".format(_recursive_check_str(op.comment)) if op.comment else ""
            print("{}{};{}".format(indent, invocation, comment), file=f)
        print("}", file=f)
    finally:
        # Restore the graph to its implicit-source form even on error.
        remove_source_operations(nnef_graph)
def _print_quantization(nnef_graph, file_handle):
# type: (NNEFGraph, typing.TextIO)->None
for tensor in nnef_graph.tensors:
if tensor.quantization is None:
print('# "{}": not quantized'.format(tensor.name), file=file_handle)
else:
print('"{}": {}({});'.format(tensor.name,
tensor.quantization.name,
', '.join("{} = {}".format(k, v)
for k, v in sorted(six.iteritems(tensor.quantization.attribs)))),
file=file_handle)
def _write_weights(nnef_graph, dir_path, raise_on_missing_weight=True):
    # type: (NNEFGraph, str, bool) -> None
    """Write every variable tensor of the graph to '<label>.dat' under dir_path.

    A variable with an empty data array means "weights were not loaded"; that
    raises only when raise_on_missing_weight is True. Any other size mismatch
    between data and the declared element count always raises.
    """
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    for tensor in nnef_graph.tensors:
        if tensor.is_variable:
            if tensor.data.size == tensor.count:
                # 'C' order: NNEF tensor files are row-major.
                write_nnef_tensor(filename=os.path.join(dir_path, tensor.label + ".dat"),
                                  array=np.asarray(tensor.data, order='C'))
            elif tensor.data.size == 0:
                if raise_on_missing_weight:
                    raise utils.NNEFToolsException("Missing value for variable: {}".format(tensor.name))
            else:
                raise utils.NNEFToolsException("Invalid data size for variable: {}, expected: {}, got: {}".
                                               format(tensor.name, tensor.count, tensor.data.size))
def read_nnef_tensor(filename):
    """Load a single NNEF .dat tensor file into a numpy array."""
    with open(filename, "rb") as file:
        return nnef.read_tensor(file)
def write_nnef_tensor(filename, array):
    """Write a numpy array as an NNEF .dat tensor file, creating parent dirs."""
    directory = os.path.dirname(filename)
    if directory and not os.path.exists(directory):
        os.makedirs(directory)
    with open(filename, "wb") as file:
        nnef.write_tensor(file=file, tensor=array)
def _transform_inputs_before_print(inputs):
    """Convert operation inputs to their printable form.

    Rank-0 constants are inlined as literal values; every other tensor is
    referenced by name via nnef.Identifier. Non-tensor values pass through.
    """
    def transform(input_):
        if isinstance(input_, NNEFTensor):
            if input_.is_constant and input_.rank == 0:
                return input_.data[0]
            else:
                return nnef.Identifier(input_.name)
        return input_
    return utils.recursive_transform(inputs, transform)
def _sorted_ordered_dict(d):
    """Return an OrderedDict holding the items of *d* in sorted order."""
    items = list(six.iteritems(d))
    items.sort()
    return OrderedDict(items)
def _result_to_identifiers(result):
    """Map operation outputs to a list of nnef.Identifier (or list thereof).

    Accepts a single tensor, a list of tensors, or a tuple of tensors and
    always returns a list, as expected by nnef.format_invocation.
    """
    def transform(result_):
        assert isinstance(result_, NNEFTensor), "Results must be NNEF tensors, or lists/tuples of that."
        return nnef.Identifier(result_.name)
    result = utils.recursive_transform(result, transform)
    if isinstance(result, nnef.Identifier) or isinstance(result, list):
        return [result]
    elif isinstance(result, tuple):
        return list(result)
    else:
        assert False, "Unexpected result type: {}".format(type(result))
def _recursive_check_str(data):
    """On Python 2, assert that *data* contains no unicode strings.

    The nnef printer accepts only str under Python 2; returns *data*
    unchanged and is a no-op on Python 3.
    """
    if sys.version_info[0] < 3:
        def check(arg):
            # noinspection PyUnresolvedReferences
            assert not isinstance(arg, unicode), \
                "NNEF module does not accept unicode strings in python2. Use NNEFGraph with str only."
        utils.recursive_visit(data, check)
    return data
def generate_source_operations(nnef_graph, gen_for_rank0_also=False):
    # type: (NNEFGraph, bool) -> None
    """Attach 'constant'/'variable'/'external' producer ops to source tensors.

    Tensors without a producer are the graph's sources; the NNEF text format
    represents them as explicit operations, so one is created per source
    tensor. Rank-0 constants are normally inlined at their use sites and get
    a 'constant' op only when they are graph outputs (or gen_for_rank0_also).
    Inverse of remove_source_operations().

    FIX: the original branch chain tested 'elif t.producer is None' inside a
    scope where that was always true, leaving its 'else: assert False' dead;
    replaced with a plain else (non-const, non-variable sources are externals).
    """
    # assert nnef_graph.is_unique
    for t in list(nnef_graph.tensors):
        if t.producer is not None:
            continue
        if t.is_constant:
            if t.rank > 0 or t in nnef_graph.outputs or gen_for_rank0_also:
                NNEFOperation(graph=nnef_graph,
                              name="constant",
                              attribs=dict(shape=t.shape, value=t.data),
                              inputs=tuple(),
                              outputs=t,
                              dtype=t.dtype)
        elif t.is_variable:
            NNEFOperation(graph=nnef_graph,
                          name="variable",
                          attribs=dict(shape=t.shape, label=t.label),
                          inputs=tuple(),
                          outputs=t,
                          dtype=t.dtype)
        else:
            # A producer-less tensor that is neither constant nor variable
            # must be a graph input.
            NNEFOperation(graph=nnef_graph,
                          name="external",
                          attribs=dict(shape=t.shape),
                          inputs=tuple(),
                          outputs=t,
                          dtype=t.dtype)
def remove_source_operations(nnef_graph):
    # type: (NNEFGraph) -> None
    """Delete all 'constant'/'variable'/'external' ops; inverse of generate_source_operations()."""
    # assert nnef_graph.is_unique
    source_op_names = {"constant", "variable", "external"}
    removable = [op for op in list(nnef_graph.operations) if op.name in source_op_names]
    nnef_graph.remove_operations(removable, unlink=True)
TFLITE_QUANTIZATION_FRAGMENT = """\
fragment tflite_quantize(x: tensor<scalar>, min: scalar, max: scalar, scale: scalar, zero_point: integer, bits: integer)
-> ( y: tensor<scalar> )
{
rounded = round(x / scale + scalar(zero_point));
q = clamp(rounded, 0.0, 255.0) if bits == 8 else clamp(rounded, -2147483648.0, 2147483647.0);
y = (q - scalar(zero_point)) * scale;
}\
"""
def add_tflite_quantization_fragment_if_needed(nnef_graph, fragments):
    # type:(NNEFGraph, str)->str
    """Append the tflite_quantize fragment when some tensor really uses it.

    A tensor "uses" it when its quantization op is 'tflite_quantize' with at
    least one non-zero attribute; otherwise *fragments* is returned unchanged.
    """
    def _needs_fragment(tensor):
        quant = tensor.quantization
        return (quant is not None
                and quant.name == "tflite_quantize"
                and any(value != 0 for value in six.itervalues(quant.attribs)))
    if not any(_needs_fragment(t) for t in nnef_graph.tensors):
        return fragments
    if fragments:
        return fragments + '\n' + TFLITE_QUANTIZATION_FRAGMENT
    return TFLITE_QUANTIZATION_FRAGMENT
class Reader(object):
    """Callable wrapper around read() with options fixed at construction.

    Usage: Reader(...)(filename) -> NNEFGraph.
    """
    def __init__(self, parser_configs=None, unify=False, input_shape=None, infer_shapes=True):
        # See read() for the meaning of these options; 'unify' additionally
        # runs nnef_unifier.unify() on the loaded graph.
        self._parser_configs = parser_configs
        self._unify = unify
        self._input_shape = input_shape
        self._infer_shapes = infer_shapes
    def __call__(self, filename):
        """Load *filename* and return the (optionally unified) NNEFGraph."""
        g = read(filename, parser_configs=self._parser_configs, input_shape=self._input_shape, infer_shapes=self._infer_shapes)
        if self._unify:
            nnef_unifier.unify(g)
        return g
class Writer(object):
    """Callable wrapper around write() with options fixed at construction.

    Usage: Writer(...)(graph, filename); returns None.
    """
    def __init__(self,
                 write_weights=True,
                 extensions=None,
                 fragments=None,
                 only_print_used_fragments=False,
                 compression_level=0):
        # See write() for the meaning of these options.
        self._write_weights = write_weights
        self._extensions = extensions
        self._fragments = fragments
        self._only_print_used_fragments = only_print_used_fragments
        self._compression_level = compression_level
    def __call__(self, graph, filename):
        """Write *graph* to *filename* (.tgz archive or directory)."""
        write(graph, filename,
              write_weights=self._write_weights,
              extensions=self._extensions,
              fragments=self._fragments,
              only_print_used_fragments=self._only_print_used_fragments,
              compression_level=self._compression_level)
        return None
|
Quernest/schedule-admin | client/src/components/Dashboard/Schedule/Week.js | import React from 'react';
import PropTypes from 'prop-types';
import { injectIntl, intlShape } from 'react-intl';
import parsers from '../../../helpers/parsers';
const { parseWeekTypes } = parsers;
/**
 * Week — titled wrapper for one week section of the schedule form.
 * The heading combines the translated week-type label (via parseWeekTypes)
 * with the generic "week" message; children hold the actual schedule rows.
 */
const Week = ({
  intl,
  children,
  className,
  type,
}) => {
  const { formatMessage } = intl;
  return (
    <div className={className} key={type}>
      <h3 className="form__week-title">
        {formatMessage({ id: parseWeekTypes(type) })} {formatMessage({ id: 'app.dashboard.semesters.form.editgroup.schedule.week' })}
      </h3>
      {children}
    </div>
  );
};
Week.defaultProps = {
  className: 'form__week',
  children: null,
};
Week.propTypes = {
  intl: intlShape.isRequired,
  type: PropTypes.number.isRequired,
  className: PropTypes.string,
  children: PropTypes.node,
};
export default injectIntl(Week);
|
AdrianKolbuk/WebService-for-driving-school | tin_projekt_Kolbuk_s17131/tin_s17131_react/src/components/other/MainContent.js | <filename>tin_projekt_Kolbuk_s17131/tin_s17131_react/src/components/other/MainContent.js
import React from 'react'
import { useTranslation } from 'react-i18next';
/**
 * MainContent — landing-page body for the driving-school app.
 * NOTE(review): the heading is translated via i18next but the paragraph
 * below is hard-coded Polish; consider moving it into the translation
 * bundle as well.
 */
function MainContent() {
  const { t } = useTranslation();
  return (
    <main>
      <h2>{t('nav.main-page')}</h2>
      <p>System informatyczny dla ośrodka szkolenia kierowców.<br></br>
      Oferujemy szkolenia z jazdy samochodem w ekstremalnych warunkach, od przeszkolenia i
      doświadczenia niebezpieczeństw drogowych, które mogą spotkać nas w codziennej jeździe samochodem, po naukę
      driftu z profesjonalnym kierowcą rajdowym.
      </p>
    </main>
  )
}
export default MainContent
qwzhang01/lotus | lotus_common/src/main/java/com/lotus/common/entity/AjaxResult.java | package com.lotus.common.entity;
/**
 * Standard JSON response envelope: a message, an error code
 * (0 = success, >= 1 = error) and an optional data payload.
 */
public class AjaxResult {
    private String message;
    private int errorCode = 0; // 0: normal , >=1 : error
    private Object data;
    /** Shared builder behind the static factory methods. */
    private static AjaxResult build(int errorCode, String message, Object data) {
        AjaxResult result = new AjaxResult();
        result.setErrorCode(errorCode);
        result.setMessage(message);
        result.setData(data);
        return result;
    }
    /** Successful result carrying only a message. */
    public static AjaxResult success(String message) {
        return build(0, message, null);
    }
    /** Successful result carrying a message and a payload. */
    public static AjaxResult success(String message, Object data) {
        return build(0, message, data);
    }
    /** Error result (code 1) carrying only a message. */
    public static AjaxResult error(String message) {
        return build(1, message, null);
    }
    public String getMessage() {
        return message;
    }
    public void setMessage(String message) {
        this.message = message;
    }
    public int getErrorCode() {
        return errorCode;
    }
    public void setErrorCode(int errorCode) {
        this.errorCode = errorCode;
    }
    public Object getData() {
        return data;
    }
    public void setData(Object data) {
        this.data = data;
    }
}
|
stdbilly/CS_Note | mycode/cpp/OOD/TextQuery/Query.cc | <gh_stars>1-10
#pragma once
#include "Query.h"
#include <algorithm>
#include <iterator>
#include <memory>
#include <set>
#include <string>
#include <vector>
#include "TextQuery.h"
using namespace std;
QueryResult OrQuery::eval(const TextQuery& text) const {
    // Virtual calls dispatched through the Query members _lhs and _rhs.
    auto right = _rhs.eval(text), left = _lhs.eval(text);
    // Copy the left operand's line numbers into the result set...
    auto ret_lines = make_shared<set<line_no>>(left.begin(), left.end());
    // ...then insert the right operand's line numbers.
    ret_lines->insert(right.begin(), right.end());
    // Return a new QueryResult representing the union of _lhs and _rhs.
    return QueryResult(rep(), ret_lines, left.getFile());
}
QueryResult AndQuery::eval(const TextQuery& text) const {
    // Virtual calls dispatched through _lhs and _rhs.
    auto right = _rhs.eval(text), left = _lhs.eval(text);
    auto ret_lines = make_shared<set<line_no>>();
    // Write the intersection of the two sorted ranges into *ret_lines.
    set_intersection(left.begin(), left.end(), right.begin(), right.end(),
                     inserter(*ret_lines, ret_lines->begin()));
    return QueryResult(rep(), ret_lines, left.getFile());
}
QueryResult NotQuery::eval(const TextQuery& text) const {
    // Lines on which the wrapped query DOES appear.
    auto result = _query.eval(text);
    auto ret_lines = make_shared<set<line_no>>();
    auto beg = result.begin(), end = result.end();
    auto sz = result.getFile()->size();
    // Walk every line of the file and keep those NOT present in result.
    // BUG FIX: the original tested '*beg == i', which inserted the matching
    // lines (and never advanced past them correctly) instead of the
    // complement; the NOT semantics require '*beg != i'.
    for (size_t i = 0; i < sz; ++i) {
        if (beg == end || *beg != i) {
            ret_lines->insert(i);  // line i is not a match
        } else if (beg != end) {
            ++beg;  // skip past this match line in result
        }
    }
    return QueryResult(rep(), ret_lines, result.getFile());
}
vicobits/django-wise | apps/accounts/serializers/token_serializer.py | # -*- coding: utf-8 -*-
from rest_framework import serializers
from rest_framework.serializers import Serializer
class TokenSerializer(Serializer):
    """Validates token existence."""
    # Required field: payloads without 'token' fail validation.
    token = serializers.CharField()
class RefreshTokenSerializer(Serializer):
    """Validates refresh_token existence.

    Base class spelled via the directly-imported ``Serializer`` for
    consistency with ``TokenSerializer`` above (same class either way).
    """
    # Required field: payloads without 'refresh_token' fail validation.
    refresh_token = serializers.CharField()
|
tusharchoudhary0003/Custom-Football-Game | sources/p005cm/aptoide/p006pt/dataprovider/p010ws/p013v7/home/ActionItemResponse.java | <gh_stars>1-10
package p005cm.aptoide.p006pt.dataprovider.p010ws.p013v7.home;
import p005cm.aptoide.p006pt.dataprovider.model.p009v7.BaseV7EndlessDataListResponse;
/* renamed from: cm.aptoide.pt.dataprovider.ws.v7.home.ActionItemResponse */
/**
 * Endless (paged) V7 data-list response whose items are {@code ActionItemData}.
 * All behavior is inherited from the generic base class.
 */
public class ActionItemResponse extends BaseV7EndlessDataListResponse<ActionItemData> {
}
|
rgomulin/aml_s905_uboot | u-boot/board/xilinx/zynq/legacy.c | <gh_stars>10-100
#warning usage of ps7_init files is deprecated please use ps7_init_gpl
|
AndreyShpilevoy/DemProject | DEM_MVC_UI/src/scripts/containers/Page_ViewForum/test.js | /*eslint no-undef: 'off'*/
/* eslint import/no-extraneous-dependencies: 'off' */
import React from 'react';
import {shallow} from 'enzyme';
import * as mockActions from 'actions/__mocks__/sharedFakeActions';
import {sharedFakeStore, validFakeStoreData} from 'store/__mocks__/sharedFakeStore';
import PageViewForum from './index';
describe('PageViewForum', () => {
function setup(mockConfigId) {
const props = {
store: sharedFakeStore(mockConfigId),
actions: mockActions,
params: {
forumId: 3
}
};
return shallow(<PageViewForum {...props}/>, { lifecycleExperimental: true });
}
it('should render "div"',() => {
const divElement = setup(0).find('div').first();
expect(divElement).toBeTruthy();
});
it('should get "chapterById" from "chapterReducer" and recieve expected result', () => {
expect(setup(1).prop('chapterItem')).toEqual(validFakeStoreData.chapterReducer.chapterById);
});
it('should get "chapterById" from "chapterReducer" and recieve expected "null"', () => {
expect(setup(0).prop('chapterItem')).toEqual(null);
});
it('should find "ChapterItem" component', () => {
const divElement = setup(1).shallow();
expect(divElement.find('ChapterItem').node.type.name).toEqual('ChapterItem');
});
it('should get "allTopics" from "topicReducer" and recieve expected result', () => {
expect(setup(1).prop('topicArray')).toEqual(validFakeStoreData.topicReducer.allTopics[0].topicArray);
});
it('should get "allTopics" from "topicReducer" and recieve "[]"', () => {
expect(setup(2).prop('topicArray')).toEqual([]);
});
it('should get "allTopics" from "topicReducer" and recieve []', () => {
expect(setup(0).prop('topicArray')).toEqual([]);
});
it('should find "TopicArray" component', () => {
const divElement = setup(0).shallow();
expect(divElement.find('TopicArray').node.type.name).toEqual('TopicArray');
});
});
|
vimofthevine/UnderBudget | src/ui/prefs/PrefsDialog.cpp | /*
* Copyright 2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Qt include(s)
#include <QtWidgets>
// UnderBudget include(s)
#include "ui/prefs/AppearanceSettings.hpp"
#include "ui/prefs/GeneralSettings.hpp"
#include "ui/prefs/PrefsDialog.hpp"
namespace ub {
//------------------------------------------------------------------------------
PrefsDialog::PrefsDialog(QWidget* parent)
	: QDialog(parent)
{
	// Set up setting category list and pages; list rows and stack pages are
	// kept index-aligned by add(), so row selection drives the visible page.
	list = new QListWidget(this);
	list->setIconSize(QSize(32,32));
	stack = new QStackedWidget(this);
	connect(list, SIGNAL(currentRowChanged(int)),
		stack, SLOT(setCurrentIndex(int)));

	QHBoxLayout* pageLayout = new QHBoxLayout;
	pageLayout->addWidget(list);
	pageLayout->addWidget(stack, 1,
		Qt::AlignTop | Qt::AlignLeft);

	// Set up setting pages
	add(":/icons/general", tr("General"), new GeneralSettings(this));
	add(":/icons/appearance", tr("Appearance"), new AppearanceSettings(this));

	// Make sure something is selected initially
	list->setCurrentRow(0);

	// Set list's width so it only uses the bare minimum required size
	list->setFixedWidth(list->sizeHintForColumn(0)
		+ (2 * list->frameWidth()));

	// Add the setting pages to the dialog layout
	QVBoxLayout* mainLayout = new QVBoxLayout;
	mainLayout->addLayout(pageLayout);

#ifndef Q_OS_MAC // OS X prefs don't have any buttons
	// Set up the buttons
	QDialogButtonBox* buttons = new QDialogButtonBox(
#ifdef Q_OS_WIN // Windows uses OK/cancel/apply
		QDialogButtonBox::Ok | QDialogButtonBox::Cancel | QDialogButtonBox::Apply,
#else // Unix
		QDialogButtonBox::Close,
#endif
		Qt::Horizontal, this);

	// Save changes to preferences
	connect(buttons, SIGNAL(accepted()), this, SIGNAL(apply()));
#ifdef Q_OS_WIN
	// Fix: only the Windows button box contains an Apply button. On the Unix
	// (Close-only) box, buttons->button(QDialogButtonBox::Apply) returns a
	// null pointer, and connect() on a null sender emits a runtime warning and
	// creates no connection — so this connect is only made where the button
	// actually exists.
	connect(buttons->button(QDialogButtonBox::Apply), SIGNAL(clicked()),
		this, SIGNAL(apply()));
#endif

	// Reset all inputs if changes are rejected
	connect(this, SIGNAL(rejected()), this, SIGNAL(reset()));

	// Close the dialog in response to buttons
	connect(buttons, SIGNAL(rejected()), this, SLOT(reject()));
	connect(buttons, SIGNAL(accepted()), this, SLOT(accept()));

	mainLayout->addWidget(buttons);
#endif // Q_OS_MAC

	setLayout(mainLayout);
	setWindowTitle(tr("Preferences"));
}
//------------------------------------------------------------------------------
void PrefsDialog::add(const QString& icon, const QString& title, QWidget* page)
{
connect(this, SIGNAL(apply()), page, SLOT(apply()));
connect(this, SIGNAL(reset()), page, SLOT(reset()));
QListWidgetItem* item = new QListWidgetItem(QIcon(icon), title, list);
list->addItem(item);
stack->addWidget(page);
}
}
|
jhtwong/Open-Quark | src/Utilities/org/openquark/util/xml/AttributeSetSerializer.java | /*
* Copyright (c) 2007 BUSINESS OBJECTS SOFTWARE LIMITED
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of Business Objects nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/*
* AttributeSetSerializer.java
* Created: Feb 11, 2005
* By: ksit
*/
package org.openquark.util.xml;
import java.util.ArrayList;
import java.util.List;
import org.openquark.util.attributes.Attribute;
import org.openquark.util.attributes.AttributeSet;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* This serializer serializes an AttributeSet as XML element. This serializer
* assumes that there exists another serializer for serializing and
* deserializing attribute values.
*/
public class AttributeSetSerializer implements XMLElementSerializer {
/** The "root" tag name of a color attribute node */
public static final String ROOT_TAG = "AttributeSet"; //$NON-NLS-1$
/**
* @see org.openquark.util.xml.XMLElementSerializer#loadFromElement(org.openquark.util.xml.XMLSerializationManager, org.w3c.dom.Element)
*/
public Object loadFromElement(XMLSerializationManager manager, Element element) {
List<Attribute> attributes = new ArrayList<Attribute>();
manager.loadFromChildElements(element, attributes);
return new AttributeSet(attributes);
}
/**
* @see org.openquark.util.xml.XMLElementSerializer#storeToElement(org.openquark.util.xml.XMLSerializationManager, org.w3c.dom.Element, java.lang.Object)
*/
public void storeToElement(XMLSerializationManager manager, Element element, Object value) {
AttributeSet attributes = (AttributeSet) value;
Document doc = element.getOwnerDocument();
Element attrElement = doc.createElement(ROOT_TAG);
element.appendChild(attrElement);
manager.storeToElement(attrElement, attributes.getAllAttributes());
}
} |
GreenLightSoftware/brochure-wsgi | brochure_wsgi/http_user_interface.py | <gh_stars>0
import json
from collections import defaultdict
from functools import partial
from typing import Callable, Optional, Dict
from brochure.brochure_user_interface import BrochureUserInterface
from brochure.values.basics import Basics
from brochure.values.contact_method import ContactMethodType
from brochure.values.section import Section
from jinja2 import Environment, PackageLoader, select_autoescape
from werkzeug.wrappers import Response
from brochure_wsgi.response_providers.exception_response_provider import ExceptionReponseProvider
from brochure_wsgi.response_providers.not_found_response_provider import NotFoundResponseProvider
from brochure_wsgi.response_providers.section_response_provider import SectionResponseProvider
class HTTPUserInterface(BrochureUserInterface):
    """Adapter from brochure-core UI callbacks to werkzeug Response objects.

    The core invokes one of the ``show_*`` callbacks; each builds a Response
    via the corresponding injected provider and stores it for the WSGI layer
    to retrieve through :meth:`get_response_provider`.
    """

    def __init__(self,
                 section_response_provider: Callable[[Section, Basics], Response],
                 not_found_response_provider: Callable[[Basics], Response],
                 exception_response_provider: Callable[[Exception, Optional[Basics]], Response]) -> None:
        # No response exists until one of the show_* callbacks fires.
        self._response = None
        self._section_response_provider = section_response_provider
        self._not_found_response_provider = not_found_response_provider
        self._exception_response_provider = exception_response_provider
        super().__init__()

    def get_response_provider(self) -> Optional[Response]:
        """Return the Response built by the most recent show_* call (None if none fired)."""
        return self._response

    def show_unknown_command(self, basics: Basics) -> None:
        # Unrecognized command -> delegate to the not-found provider.
        self._response = self._not_found_response_provider(basics)

    def show_cover(self, cover_section: Section, basics: Basics) -> None:
        # Render the cover section as the page body.
        self._response = self._section_response_provider(cover_section, basics)

    def show_unexpected_exception(self, exception: Exception, basics: Optional[Basics]) -> None:
        # basics may be None if the failure happened before basics were loaded.
        self._response = self._exception_response_provider(exception, basics)
class HTTPUserInterfaceProvider(object):
    """Factory of per-request :class:`HTTPUserInterface` instances.

    Builds, once at construction time, an HTML pipeline (Jinja templates) and
    a JSON pipeline, then dispatches on the request's ``Accept`` header in
    :meth:`__call__` — ``application/json`` gets the JSON interface, anything
    else falls back to HTML.
    """

    def __init__(self):
        super().__init__()
        # Jinja environment rendering the package's bundled templates;
        # autoescaping is enabled for .html templates only.
        html_template_provider = Environment(
            loader=PackageLoader('brochure_wsgi', 'templates'),
            autoescape=select_autoescape(('html',))
        )

        # --- serializers: turn a body string into a werkzeug Response ---

        def html_serializer(body: str, status: int) -> Response:
            return Response(body, mimetype="text/html", status=status)

        def status_code_html_serializer_provider(status: int) -> Callable[[str], Response]:
            # Bind the status code, leaving a one-argument body serializer.
            return lambda body: html_serializer(body=body, status=status)

        def json_serializer(body: str, status: int) -> Response:
            return Response(body, mimetype="application/json", status=status)

        def status_code_json_serializer_provider(status: int) -> Callable[[str], Response]:
            return lambda body: json_serializer(body=body, status=status)

        # --- template/JSON context builders ---

        def basics_context_serializer(basics: Basics) -> Dict[str, Dict[str, str]]:
            """Flatten Basics (enterprise + contact method) into plain dicts."""
            contact_method_dictionary = {}
            # Only the EMAIL contact method type is handled here.
            if basics.contact_method.contact_method_type == ContactMethodType.EMAIL:  # pragma nocover
                contact_method_dictionary["display_name"] = "Email"
                contact_method_dictionary["contact_method_type"] = "email"
                contact_method_dictionary["value"] = basics.contact_method.value
            # noinspection PyProtectedMember
            context = {"enterprise": basics.enterprise._asdict(),
                       "contact_method": contact_method_dictionary}
            return context

        def section_context_serializer(section: Section, basics: Basics) -> Dict[str, Dict[str, str]]:
            """Basics context plus the section being rendered."""
            basics_dictionary = basics_context_serializer(basics)
            # noinspection PyProtectedMember
            context = {**basics_dictionary, **{"section": section._asdict()}}
            return context

        # Status-bound serializers for each outcome.
        ok_html_serializer = status_code_html_serializer_provider(200)
        not_found_html_serializer = status_code_html_serializer_provider(404)
        ok_json_serializer = status_code_json_serializer_provider(200)
        not_found_json_serializer = status_code_json_serializer_provider(404)
        exception_json_serializer = status_code_json_serializer_provider(500)

        index_template = html_template_provider.get_template("section.html")
        not_found_template = html_template_provider.get_template("not_found.html")
        exception_template = html_template_provider.get_template("exception.html")

        # --- HTML response providers ---
        section_response_html_provider = SectionResponseProvider(template=index_template,
                                                                 section_context_serializer=section_context_serializer,
                                                                 response_serializer=ok_html_serializer)
        not_found_response_html_provider = NotFoundResponseProvider(
            template=not_found_template,
            basics_context_serializer=basics_context_serializer,
            response_serializer=not_found_html_serializer)
        # NOTE(review): the other providers receive a status-bound one-argument
        # serializer, but this one gets the raw two-argument html_serializer —
        # presumably ExceptionReponseProvider supplies the status itself;
        # confirm against its implementation.
        exception_response_html_provider = ExceptionReponseProvider(
            template=exception_template,
            basics_context_serializer=basics_context_serializer,
            response_serializer=html_serializer)

        # --- JSON response providers ---

        def section_response_json_provider(section: Section, basics: Basics) -> Response:
            section_dictionary = section_context_serializer(section, basics)
            return ok_json_serializer(json.dumps(section_dictionary))

        def not_found_response_json_provider(basics: Basics, path: str) -> Response:
            dictionary = basics_context_serializer(basics)
            dictionary["error"] = "Resource '{}' not found.".format(path)
            return not_found_json_serializer(json.dumps(dictionary))

        def exception_response_json_provider(exception: Exception, basics: Optional[Basics]) -> Response:
            # NOTE(review): basics is typed Optional but is dereferenced by
            # basics_context_serializer unconditionally — a None here would
            # raise inside the error path; verify callers never pass None.
            dictionary = basics_context_serializer(basics)
            dictionary["error"] = str(exception)
            return exception_json_serializer(json.dumps(dictionary))

        # --- assemble the two interface factories, keyed by Accept header ---

        def html_response_provider(path: str) -> HTTPUserInterface:
            # Pre-bind the request path so the interface-level provider keeps
            # the (basics) -> Response signature HTTPUserInterface expects.
            not_found_response_provider = partial(not_found_response_html_provider, **{"path": path})
            return HTTPUserInterface(section_response_provider=section_response_html_provider,
                                     not_found_response_provider=not_found_response_provider,
                                     exception_response_provider=exception_response_html_provider)

        def json_interface_provider(path: str) -> HTTPUserInterface:
            return HTTPUserInterface(
                section_response_provider=section_response_json_provider,
                not_found_response_provider=partial(not_found_response_json_provider, **{"path": path}),
                exception_response_provider=exception_response_json_provider)

        # Any unrecognized (or missing) Accept value falls back to HTML.
        self._accept_map = defaultdict(lambda: html_response_provider)
        self._accept_map["application/json"] = json_interface_provider

    def __call__(self, path: str, accept: Optional[str]) -> HTTPUserInterface:
        """Return a fresh interface for this request, chosen by Accept header."""
        return self._accept_map[accept](path)
|
werminghoff/Provenance | Cores/PicoDrive/platform/gp2x/warm.h | /*
* wARM - exporting ARM processor specific privileged services to userspace
* library functions
*
* Copyright (c) Gražvydas "notaz" Ignotas, 2009
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the organization nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __WARM_H__
#define __WARM_H__ 1
/* cache operations (warm_cache_op_*):
* o clean - write dirty data to memory, but also leave in cache.
* o invalidate - throw away everything in cache, losing dirty data.
*
* Write buffer is always drained, no ops will only drain WB
*/
#define WOP_D_CLEAN (1 << 0)
#define WOP_D_INVALIDATE (1 << 1)
#define WOP_I_INVALIDATE (1 << 2)
/* change C and B bits (warm_change_cb_*)
* if is_set in not zero, bits are set, else cleared.
* the address for range function is virtual address.
*/
#define WCB_C_BIT (1 << 0)
#define WCB_B_BIT (1 << 1)
#ifndef __ASSEMBLER__
#ifdef __cplusplus
extern "C"
{
#endif
int warm_init(void);
int warm_cache_op_range(int ops, void *virt_addr, unsigned long size);
int warm_cache_op_all(int ops);
int warm_change_cb_upper(int cb, int is_set);
int warm_change_cb_range(int cb, int is_set, void *virt_addr, unsigned long size);
unsigned long warm_virt2phys(const void *ptr);
void warm_finish(void);
#ifdef __cplusplus
}
#endif
/* internal */
#ifdef WARM_CODE
#include <linux/ioctl.h>
#define WARM_IOCTL_BASE 'A'
/* Argument block for the WARMC_CACHE_OP ioctl: a virtual address range and
 * the cache operations to apply to it. */
struct warm_cache_op
{
	unsigned long addr;	/* virtual start address of the range */
	unsigned long size;	/* length of the range, in bytes */
	int ops;		/* bitwise OR of the WOP_* flags above */
};

/* Argument block for the WARMC_CHANGE_CB ioctl: set or clear the C/B page
 * bits over a virtual address range. */
struct warm_change_cb
{
	unsigned long addr;	/* virtual start address of the range */
	unsigned long size;	/* length of the range, in bytes */
	int cb;			/* bitwise OR of WCB_C_BIT / WCB_B_BIT */
	int is_set;		/* non-zero to set the bits, zero to clear them */
};
#define WARMC_CACHE_OP _IOW(WARM_IOCTL_BASE, 0, struct warm_cache_op)
#define WARMC_CHANGE_CB _IOW(WARM_IOCTL_BASE, 1, struct warm_change_cb)
#define WARMC_VIRT2PHYS _IOWR(WARM_IOCTL_BASE, 2, unsigned long)
#endif /* WARM_CODE */
#endif /* !__ASSEMBLER__ */
#endif /* __WARM_H__ */
|
katalysteducation/cnx-designer | test/handlers/exercise/enter-in-solution.js | /** @jsx h */
import { Editor, Element, Transforms } from 'slate'
// Scripted user input: press Enter twice inside the solution (splitting it —
// see the `output` fixture below), then move the caret to the end of the
// nearest enclosing Element and press Enter three more times, which per the
// fixture escapes the exercise into a trailing empty paragraph.
export default (input, editor) => {
  input.break().break()
  // Editor.above(...) finds the closest enclosing Element; select its end.
  Transforms.select(editor, Editor.end(editor, Editor.above(editor, { match: Element.isElement })[1]))
  input.break().break().break()
}
// Initial document: one exercise whose solution holds the caret inside "Solution".
export const input = <editor>
  <exercise>
    <exproblem>
      <p>Problem</p>
    </exproblem>
    <exsolution>
      <p>Sol<cursor/>ution</p>
    </exsolution>
  </exercise>
</editor>
// Expected result: the solution is split in two at the caret, and the final
// Enter presses leave the caret in a fresh paragraph after the exercise.
export const output = <editor>
  <exercise>
    <exproblem>
      <p>Problem</p>
    </exproblem>
    <exsolution>
      <p>Sol</p>
    </exsolution>
    <exsolution>
      <p>ution</p>
    </exsolution>
  </exercise>
  <p><text><cursor/></text></p>
</editor>
|
rrdrake/vvtest | trig/svnemail.py | <gh_stars>1-10
#!/usr/bin/env python
import sys
sys.dont_write_bytecode = True
sys.excepthook = sys.__excepthook__
import os
import time
import signal
from mailmessage import Message, get_current_user_name
SEND_MAIL_TIMEOUT = 30
DEFAULT_SMTPHOSTS = ['smtp.sandia.gov','localhost']
EMAIL_DOMAIN = 'sandia.gov'
class CommitEmailComposer:
    """Builds and sends a notification email for a single SVN commit.

    ``cmt`` is a commit object exposing getAuthor(), getRepoURL(), getBranch(),
    getShortMessage() and asMultiLineString() (see the visible usage below).
    """

    def __init__(self, cmt):
        """Remember the commit; the Message is built lazily by compose()."""
        self.cmt = cmt
        self.msg = None

    def compose(self, recipients=None, subject=None):
        """Assemble self.msg from the commit.

        recipients/subject override the defaults derived from the commit
        (repo/branch subject line, current user as recipient). Bare user
        names are qualified with EMAIL_DOMAIN.
        """
        self.msg = Message()
        addr = self.cmt.getAuthor()
        if '@' not in addr:
            # Qualify a bare SVN user name into a full address.
            addr += '@'+EMAIL_DOMAIN
        self.msg.set( sendaddr=addr )
        if subject:
            sbj = subject
        else:
            # Default subject: "[repo:branch] first line of the commit message"
            reponame = os.path.basename( self.cmt.getRepoURL() )
            sbj = '['+reponame+':'+self.cmt.getBranch()+'] ' + \
                  self.cmt.getShortMessage()
        self.msg.set( subject=sbj )
        if recipients:
            recv = recipients
        else:
            # Default: mail the user running this process.
            usr = get_current_user_name()
            recv = usr+'@'+EMAIL_DOMAIN
        self.msg.set( recvaddrs=recv )
        self.msg.setContent( self.cmt.asMultiLineString() )

    def send(self, smtp_hosts=None, debug=False):
        """Send the composed message; see send_message() for timeout handling."""
        return send_message( self.msg, smtp_hosts, debug )
# use signals to implement a timeout mechanism
class TimeoutException(Exception):
    """Raised by timeout_handler when the SIGALRM send timeout fires."""


def timeout_handler(signum, frame):
    """SIGALRM handler: abort the in-progress send by raising TimeoutException."""
    raise TimeoutException("timeout")
def send_message( msg, smtp_hosts, debug ):
    """Send *msg* via SMTP with a SIGALRM-enforced hard timeout.

    Falls back to DEFAULT_SMTPHOSTS when no hosts are given. In debug mode the
    message's send() is invoked with smtpclass=None and its return value is
    passed through; otherwise None is returned. The previous SIGALRM handler
    and a zeroed alarm are always restored.
    """
    hosts = smtp_hosts if smtp_hosts else DEFAULT_SMTPHOSTS
    result = None

    # Arm the alarm slightly longer than the socket-level timeout so the
    # in-band timeout gets a chance to fire first.
    previous_handler = signal.signal(signal.SIGALRM, timeout_handler)
    soft_timeout = int(SEND_MAIL_TIMEOUT * 0.9)
    signal.alarm(SEND_MAIL_TIMEOUT)
    try:
        if debug:
            result = msg.send(smtphosts=hosts, timeout=soft_timeout, smtpclass=None)
        else:
            msg.send(smtphosts=hosts, timeout=soft_timeout)
    finally:
        # Disarm and restore, even when the send raised.
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous_handler)

    return result
|
dbadari/fiscalizer | spec/spec_helper.rb | require 'fiscalizer'
require 'pathname'
require 'pry'
require 'securerandom'
require 'webmock/rspec'
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
root_path = Pathname.new(File.expand_path('../', File.dirname(__FILE__)))
Dir[root_path.join('spec/support/**/*.rb')].each { |f| require f }
public
# Map a canned-response file name to its fixture path under spec/support.
def get_response_file(file)
  fixtures_dir = 'spec/support/responses'
  "#{fixtures_dir}/#{file}"
end
|
pramulkant/https-github.com-android-art-intel-marshmallow | art-extension/opttests/src/OptimizationTests/regression/test183046/Main.java | /*
* Copyright (C) 2015 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by Dalvik Fuzzer tool (3.2.001). Sat Mar 29 20:31:04 2014
package OptimizationTests.regression.test183046;
import OptimizationTests.regression.shared.*;
// Exception type referenced by the generated catch block below; never thrown
// explicitly in this file.
class UserDefinedException extends RuntimeException {
    public int field;
}

/**
 * Machine-generated regression test (Dalvik Fuzzer, see header). The
 * arithmetic is intentionally arbitrary; the test's value is that the printed
 * checksums stay identical across compiler/optimizer changes. Do not
 * hand-edit the computations.
 */
public class Main extends FuzzerUtils {

    public static final int N = 500;

    public static int eu7 = -21933, vt4[] = new int[N];

    static {
        init(vt4, -48312);
    }

    public static void main(String[] zs9) {
        double b3o = -109.327;
        int mcm = 8006, t = 32268, xht = -39908, pc = 47331, gw = -11235, gyc = -53693, sn = -22447, ikw = -15597, wn = -47914, zc = 49010, v = -24274, gdj = -24595;
        short mzy = 27264, f = 29439;
        long k = 992388804570515873L, n[] = new long[N], m[] = new long[N], nyd[] = new long[N], yvw[] = new long[N];
        byte thr = 105, li = 26;
        boolean rn = true, ve = false;
        float uwe = -76.878F, yac = -118.933F;

        init(n, 2754103120434584284L);
        init(m, 1212915630763605963L);
        init(nyd, 3311949261514641211L);
        init(yvw, 2850231961447983563L);

        // Generated workload: nested loops whose only contract is deterministic
        // final values, printed below and compared against a golden log.
        try {
            mcm = 449;
            while (--mcm > 0) {
                for (t = 10; t < 276; t++) {
                    if (rn != (rn = ve))
                        break;
                    n[t - 1] = (long) uwe;
                    xht -= vt4[t - 1];
                    for (pc = 12; pc < 210; pc += 3) {
                        m[mcm] *= ((thr++) - (vt4[t] += (xht++)));
                    }
                }
            }
            for (gw = 4; gw < 473; gw++) {
                m[gw - 1] = vt4[gw];
                for (gyc = 1; gyc < gw; ++gyc) {
                    for (sn = 120; sn > 1; --sn) {
                        li += (byte) (((mzy << 31170) - ikw) - ((-wn) / (++vt4[gyc + 1])));
                    }
                    wn = (int) ((-(24661 - (mcm / zc))) / b3o);
                    eu7 *= (int) ((++k) - ((-(mzy + -55952)) + (pc * wn)));
                }
            }
        } catch (ArithmeticException fmc) {
            f >>= (short) (((nyd[(49550 >>> 1) % N] <<= f) + (v / ((long) uwe | 1))) / ((--k) | 1));
        } catch (UserDefinedException dk4) {
            yvw[(-46562 >>> 1) % N] = (long) (gdj - ((sn - b3o) * (k - yac)));
        }

        // Emit every variable/array checksum for golden-output comparison.
        System.out.println("mcm t rn = " + mcm + "," + t + "," + rn);
        System.out.println("ve uwe xht = " + ve + ","
                + Float.floatToIntBits(uwe) + "," + xht);
        System.out.println("pc thr gw = " + pc + "," + thr + "," + gw);
        System.out.println("gyc sn li = " + gyc + "," + sn + "," + li);
        System.out.println("mzy ikw wn = " + mzy + "," + ikw + "," + wn);
        System.out.println("zc b3o k = " + zc + ","
                + Double.doubleToLongBits(b3o) + "," + k);
        System.out.println("f v gdj = " + f + "," + v + "," + gdj);
        System.out.println("yac n m = " + Float.floatToIntBits(yac) + ","
                + checkSum(n) + "," + checkSum(m));
        System.out.println("nyd yvw = " + checkSum(nyd) + "," + checkSum(yvw));
        System.out.println("eu7 vt4 = " + eu7 + "," + checkSum(vt4));
    }
}
|
thejohnfreeman/rambda | modules/reject.js | import filter from './filter'
// Curried complement of filter: keep only the elements for which `fn` is falsy.
export default function reject (fn, arr) {
  if (arr === undefined) {
    // Called with just the predicate: return a function awaiting the array.
    return pendingArr => reject(fn, pendingArr)
  }
  const keep = value => !fn(value)
  return filter(keep, arr)
}
|
apihackers/wapps | wapps/factories/category.py | <filename>wapps/factories/category.py
import factory
from wapps.models import Category
class CategoryFactory(factory.DjangoModelFactory):
    """factory_boy factory producing wapps ``Category`` rows with unique names."""

    # "Category 0", "Category 1", ... — unique per the factory sequence counter.
    name = factory.Sequence(lambda n: 'Category {0}'.format(n))

    class Meta:
        model = Category
|
santosh653/interproscan | core/io/src/main/java/uk/ac/ebi/interpro/scan/io/match/hmmer/hmmer3/parsemodel/DomainMatch.java | package uk.ac.ebi.interpro.scan.io.match.hmmer.hmmer3.parsemodel;
import java.io.Serializable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Provides a match for a Domain line in hmmsearch output format.
*
* @author <NAME>
* @version $Id$
* @since 1.0-SNAPSHOT
*/
public class DomainMatch implements Serializable {

    /**
     * Capture groups of DOMAIN_LINE_PATTERN (as consumed by the Matcher
     * constructor below — note group numbering starts at 1):
     * Group[1] Domain number (int)
     * Group[2] Score (float)
     * Group[3] Bias (float)
     * Group[4] c-Evalue (float)
     * Group[5] i-Evalue (float)
     * Group[6] hmm from (int)
     * Group[7] hmm to (int)
     * Group[8] hmmbounds, e.g. "[]"
     * Group[9] aliFrom (int)
     * Group[10] aliTo (int)
     * Group[11] envFrom (int)
     * Group[12] envTo (int)
     * Group[13] acc (float)
     */
    public static final Pattern DOMAIN_LINE_PATTERN = Pattern.compile("^\\s+(\\d+)\\s+[!?]\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+(\\d+)\\s+(\\d+)\\s+(\\S+)\\s+(\\d+)\\s+(\\d+)\\s+\\S+\\s+(\\d+)\\s+(\\d+)\\s+\\S+\\s+(\\S+).*$");

    // Matches the "== domain N ..." alignment header lines. (entered by Manjula)
    public static final Pattern DOMAIN_ALIGNMENT_LINE_PATTERN = Pattern.compile("^\\s+==\\s+domain\\s+(\\d+)\\s+.*$");

    // TODO: This pattern won't work for UniProt FASTA files because assumes sequence ID contains numbers
    // TODO: and letters only, but UniProt FASTA ID lines contain "|", for example "tr|Q9U4N3|Q9U4N3_TOXGO"
    public static final Pattern ALIGNMENT_SEQUENCE_PATTERN = Pattern.compile("^\\s+(\\w+)\\s+(\\S+)\\s+([-a-zA-Z]+)\\s+(\\S+)\\s*$");

    // entered by Manjula for Gene3D parser
    //private final int domainNumber;
    private final double score;
    private final double bias;
    private final double cEvalue;
    private final double iEvalue;
    private final int hmmfrom;
    private final int hmmto;
    private final String hmmBounds;
    private final int aliFrom;
    private final int aliTo;
    private final int envFrom;
    private final int envTo;
    private final double acc;
    private String alignment;

    /**
     * Builds a match from a Matcher that has already matched
     * DOMAIN_LINE_PATTERN (group(1), the domain number, is skipped).
     */
    public DomainMatch(Matcher domainLineMatcher) {
        this.score = Double.parseDouble(domainLineMatcher.group(2));
        this.bias = Double.parseDouble(domainLineMatcher.group(3));
        this.cEvalue = Double.parseDouble(domainLineMatcher.group(4));
        this.iEvalue = Double.parseDouble(domainLineMatcher.group(5));
        this.hmmfrom = Integer.parseInt(domainLineMatcher.group(6));
        this.hmmto = Integer.parseInt(domainLineMatcher.group(7));
        this.hmmBounds = domainLineMatcher.group(8);
        this.aliFrom = Integer.parseInt(domainLineMatcher.group(9));
        this.aliTo = Integer.parseInt(domainLineMatcher.group(10));
        this.envFrom = Integer.parseInt(domainLineMatcher.group(11));
        this.envTo = Integer.parseInt(domainLineMatcher.group(12));
        this.acc = Double.parseDouble(domainLineMatcher.group(13));
    }

    /** Copy-style constructor from an already-parsed SequenceDomainMatch. */
    public DomainMatch(SequenceDomainMatch sequenceDomainMatch) {
        this.score = sequenceDomainMatch.getScore();
        this.bias = sequenceDomainMatch.getBias();
        this.cEvalue = sequenceDomainMatch.getCEvalue();
        this.iEvalue = sequenceDomainMatch.getIEvalue();
        this.hmmfrom = sequenceDomainMatch.getHmmfrom();
        this.hmmto = sequenceDomainMatch.getHmmto();
        this.hmmBounds = sequenceDomainMatch.getHmmBounds();
        this.aliFrom = sequenceDomainMatch.getAliFrom();
        this.aliTo = sequenceDomainMatch.getAliTo();
        this.envFrom = sequenceDomainMatch.getEnvFrom();
        this.envTo = sequenceDomainMatch.getEnvTo();
        this.acc = sequenceDomainMatch.getAcc();
    }

    public String getAlignment() {
        return alignment;
    }

    public double getScore() {
        return score;
    }

    public double getBias() {
        return bias;
    }

    public double getCEvalue() {
        return cEvalue;
    }

    public double getIEvalue() {
        return iEvalue;
    }

    public int getHmmfrom() {
        return hmmfrom;
    }

    public int getHmmto() {
        return hmmto;
    }

    public String getHmmBounds() {
        return hmmBounds;
    }

    public int getAliFrom() {
        return aliFrom;
    }

    public int getAliTo() {
        return aliTo;
    }

    public int getEnvFrom() {
        return envFrom;
    }

    public int getEnvTo() {
        return envTo;
    }

    public double getAcc() {
        return acc;
    }

    // alignment is the only mutable field; set after construction by the parser.
    public void setAlignment(String alignment) {
        this.alignment = alignment;
    }

    @Override
    public String toString() {
        return "DomainMatch{" +
                "score=" + score +
                ", bias=" + bias +
                ", cEvalue=" + cEvalue +
                ", iEvalue=" + iEvalue +
                ", hmmfrom=" + hmmfrom +
                ", hmmto=" + hmmto +
                ", hmmBounds='" + hmmBounds + '\'' +
                ", aliFrom=" + aliFrom +
                ", aliTo=" + aliTo +
                ", envFrom=" + envFrom +
                ", envTo=" + envTo +
                ", acc=" + acc +
                ", alignment='" + alignment + '\'' +
                '}';
    }
}
|
kdoomsday/kaminalapp | test/daos/doobie/UserDaoDoobieSpec.scala | package daos.doobie
import doobie.specs2.imports.AnalysisSpec
import org.specs2.mutable.Specification
import testutil.TestUtil
/** Tests that type-check the UserDaoDoobie queries. */
object UserDaoDoobieSpec extends Specification with AnalysisSpec {

  // Transactor for the test database; check() analyzes each query against it.
  val transactor = TestUtil.transactor()

  // Analyze every UserDaoDoobie query/update with placeholder arguments —
  // only the SQL/column typing is exercised, not the argument values.
  check(UserDaoDoobie.userIdQuery(0L))
  check(UserDaoDoobie.userLoginQuery(""))
  check(UserDaoDoobie.setConnected(""))
  check(UserDaoDoobie.qUsersByRole(""))
  check(UserDaoDoobie.qCrearUsuario("", "", 0, ""))
  check(UserDaoDoobie.qCambiarClave(0L, "", 0))
  check(UserDaoDoobie.qUserRole(""))
}
|
dpukhkaiev/BRISE2 | worker/worker_tools/__init__.py | __all__ = [
"reflective_worker_method_import",
"splitter"
]
|
WhatAboutGaming/pyramid-waggle | server/util/tokens.js | const sodium = require("sodium").api;
// Default length (in characters) of a generated session token.
const SESSION_KEY_LENGTH = 80;
// Token alphabet: the 62 ASCII alphanumerics.
const CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
// Module-level registry of tokens this process will accept.
var acceptedTokens = [];
// CSPRNG-backed float: libsodium's 32-bit random draw scaled by 0xffffffff.
// NOTE(review): the result lies in [0, 1] *inclusive* — it is exactly 1.0
// when the raw draw is 0xffffffff; callers must tolerate the closed interval.
const rand = function() {
  return sodium.randombytes_random() / 0xffffffff;
};
// Record a token as accepted; falsy values are ignored.
const addToAcceptedTokens = function(token) {
  if (!token) {
    return;
  }
  acceptedTokens = acceptedTokens.concat([ token ]);
};
// True when the token was previously registered via addToAcceptedTokens.
const isAnAcceptedToken = function(token) {
  return acceptedTokens.some(candidate => candidate === token);
};
// Forget every accepted token (e.g. on session reset).
const clearAcceptedTokens = function() {
  acceptedTokens = [];
};
// Generate a random token of `length` characters drawn from CHARS.
// Fix: the previous Math.round(rand() * (CHARS.length - 1)) rounded to the
// nearest index, which gives the first and last alphabet characters only half
// the probability of the others. Math.floor over the full range is uniform;
// the Math.min clamp covers rand()'s inclusive upper bound of exactly 1.0.
const generateToken = function(length = SESSION_KEY_LENGTH) {
  var out = "";
  for (var i = 0; i < length; i++) {
    const index = Math.min(Math.floor(rand() * CHARS.length), CHARS.length - 1);
    out += CHARS[index];
  }
  return out;
};
// Generate a token and immediately register it as accepted.
const generateAcceptedToken = function(length = SESSION_KEY_LENGTH) {
  const freshToken = generateToken(length);
  addToAcceptedTokens(freshToken);
  return freshToken;
};
// Public token API. generateToken only creates a token; generateAcceptedToken
// also records it so isAnAcceptedToken will recognize it later.
module.exports = {
  addToAcceptedTokens,
  clearAcceptedTokens,
  generateAcceptedToken,
  generateToken,
  isAnAcceptedToken
};
atul-vyshnav/2021_IBM_Code_Challenge_StockIT | src/StockIT-v1-release_source_from_JADX/sources/com/google/android/gms/ads/internal/overlay/AdOverlayInfoParcel.java | <reponame>atul-vyshnav/2021_IBM_Code_Challenge_StockIT<gh_stars>1-10
package com.google.android.gms.ads.internal.overlay;
import android.content.Intent;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.android.gms.ads.internal.zzi;
import com.google.android.gms.common.internal.ReflectedParcelable;
import com.google.android.gms.common.internal.safeparcel.AbstractSafeParcelable;
import com.google.android.gms.common.internal.safeparcel.SafeParcelWriter;
import com.google.android.gms.dynamic.IObjectWrapper;
import com.google.android.gms.dynamic.ObjectWrapper;
import com.google.android.gms.internal.ads.zzagt;
import com.google.android.gms.internal.ads.zzagv;
import com.google.android.gms.internal.ads.zzbbx;
import com.google.android.gms.internal.ads.zzbgj;
import com.google.android.gms.internal.ads.zzux;
/* compiled from: com.google.android.gms:play-services-ads@@19.4.0 */
public final class AdOverlayInfoParcel extends AbstractSafeParcelable implements ReflectedParcelable {
public static final Parcelable.Creator<AdOverlayInfoParcel> CREATOR = new zzn();
public final int orientation;
public final String url;
public final zzbbx zzbpe;
public final zzux zzcgv;
public final zzagt zzdep;
public final zzagv zzdeq;
public final zzbgj zzdgy;
public final zzb zzdpl;
public final zzp zzdpm;
public final String zzdpn;
public final boolean zzdpo;
public final String zzdpp;
public final zzv zzdpq;
public final int zzdpr;
public final String zzdps;
public final zzi zzdpt;
public static void zza(Intent intent, AdOverlayInfoParcel adOverlayInfoParcel) {
Bundle bundle = new Bundle(1);
bundle.putParcelable("com.google.android.gms.ads.inernal.overlay.AdOverlayInfo", adOverlayInfoParcel);
intent.putExtra("com.google.android.gms.ads.inernal.overlay.AdOverlayInfo", bundle);
}
public static AdOverlayInfoParcel zzd(Intent intent) {
try {
Bundle bundleExtra = intent.getBundleExtra("com.google.android.gms.ads.inernal.overlay.AdOverlayInfo");
bundleExtra.setClassLoader(AdOverlayInfoParcel.class.getClassLoader());
return (AdOverlayInfoParcel) bundleExtra.getParcelable("com.google.android.gms.ads.inernal.overlay.AdOverlayInfo");
} catch (Exception unused) {
return null;
}
}
public AdOverlayInfoParcel(zzux zzux, zzp zzp, zzv zzv, zzbgj zzbgj, int i, zzbbx zzbbx, String str, zzi zzi, String str2, String str3) {
this.zzdpl = null;
this.zzcgv = null;
this.zzdpm = zzp;
this.zzdgy = zzbgj;
this.zzdep = null;
this.zzdeq = null;
this.zzdpn = str2;
this.zzdpo = false;
this.zzdpp = str3;
this.zzdpq = null;
this.orientation = i;
this.zzdpr = 1;
this.url = null;
this.zzbpe = zzbbx;
this.zzdps = str;
this.zzdpt = zzi;
}
public AdOverlayInfoParcel(zzux zzux, zzp zzp, zzv zzv, zzbgj zzbgj, boolean z, int i, zzbbx zzbbx) {
this.zzdpl = null;
this.zzcgv = zzux;
this.zzdpm = zzp;
this.zzdgy = zzbgj;
this.zzdep = null;
this.zzdeq = null;
this.zzdpn = null;
this.zzdpo = z;
this.zzdpp = null;
this.zzdpq = zzv;
this.orientation = i;
this.zzdpr = 2;
this.url = null;
this.zzbpe = zzbbx;
this.zzdps = null;
this.zzdpt = null;
}
public AdOverlayInfoParcel(zzux zzux, zzp zzp, zzagt zzagt, zzagv zzagv, zzv zzv, zzbgj zzbgj, boolean z, int i, String str, zzbbx zzbbx) {
this.zzdpl = null;
this.zzcgv = zzux;
this.zzdpm = zzp;
this.zzdgy = zzbgj;
this.zzdep = zzagt;
this.zzdeq = zzagv;
this.zzdpn = null;
this.zzdpo = z;
this.zzdpp = null;
this.zzdpq = zzv;
this.orientation = i;
this.zzdpr = 3;
this.url = str;
this.zzbpe = zzbbx;
this.zzdps = null;
this.zzdpt = null;
}
/**
 * Constructor for overlay variant 3 (zzdpr = 3), two-string form: stores
 * str2 as zzdpn and str as zzdpp, and leaves {@code url} null — the mirror
 * of the single-string sibling above.
 */
public AdOverlayInfoParcel(zzux zzux, zzp zzp, zzagt zzagt, zzagv zzagv, zzv zzv, zzbgj zzbgj, boolean z, int i, String str, String str2, zzbbx zzbbx) {
    // Variant tag, orientation and fullscreen flag.
    this.zzdpr = 3;
    this.orientation = i;
    this.zzdpo = z;
    // Fields populated from arguments.
    this.zzcgv = zzux;
    this.zzdpm = zzp;
    this.zzdep = zzagt;
    this.zzdeq = zzagv;
    this.zzdpq = zzv;
    this.zzdgy = zzbgj;
    this.zzdpn = str2;
    this.zzdpp = str;
    this.zzbpe = zzbbx;
    // Fields unused by this variant.
    this.zzdpl = null;
    this.url = null;
    this.zzdps = null;
    this.zzdpt = null;
}
/**
 * Constructor for overlay variant 4 (zzdpr = 4): the only variant that
 * carries a zzb payload. Orientation is fixed at -1 (unspecified).
 */
public AdOverlayInfoParcel(zzb zzb, zzux zzux, zzp zzp, zzv zzv, zzbbx zzbbx) {
    // Variant tag and fixed orientation.
    this.zzdpr = 4;
    this.orientation = -1;
    // Fields populated from arguments.
    this.zzdpl = zzb;
    this.zzcgv = zzux;
    this.zzdpm = zzp;
    this.zzdpq = zzv;
    this.zzbpe = zzbbx;
    // Fields unused by this variant.
    this.zzdgy = null;
    this.zzdep = null;
    this.zzdeq = null;
    this.zzdpn = null;
    this.zzdpo = false;
    this.zzdpp = null;
    this.url = null;
    this.zzdps = null;
    this.zzdpt = null;
}
/**
 * Package-private unparceling constructor: rehydrates the object from the
 * raw IBinders read by the CREATOR. Each binder is unwrapped back to its
 * concrete type via ObjectWrapper.
 *
 * NOTE(review): the binder-to-field mapping is intentionally asymmetric —
 * iBinder6 -> zzdep and iBinder4 -> zzdeq — and must mirror the field IDs
 * written in writeToParcel (zzdep is field 18, added after the others);
 * verify against the CREATOR before touching the ordering.
 */
AdOverlayInfoParcel(zzb zzb, IBinder iBinder, IBinder iBinder2, IBinder iBinder3, IBinder iBinder4, String str, boolean z, String str2, IBinder iBinder5, int i, int i2, String str3, zzbbx zzbbx, String str4, zzi zzi, IBinder iBinder6) {
    this.zzdpl = zzb;
    this.zzcgv = (zzux) ObjectWrapper.unwrap(IObjectWrapper.Stub.asInterface(iBinder));
    this.zzdpm = (zzp) ObjectWrapper.unwrap(IObjectWrapper.Stub.asInterface(iBinder2));
    this.zzdgy = (zzbgj) ObjectWrapper.unwrap(IObjectWrapper.Stub.asInterface(iBinder3));
    // iBinder6 (the last parameter) maps to zzdep — see class note above.
    this.zzdep = (zzagt) ObjectWrapper.unwrap(IObjectWrapper.Stub.asInterface(iBinder6));
    this.zzdeq = (zzagv) ObjectWrapper.unwrap(IObjectWrapper.Stub.asInterface(iBinder4));
    this.zzdpn = str;
    this.zzdpo = z;
    this.zzdpp = str2;
    this.zzdpq = (zzv) ObjectWrapper.unwrap(IObjectWrapper.Stub.asInterface(iBinder5));
    this.orientation = i;
    this.zzdpr = i2;
    this.url = str3;
    this.zzbpe = zzbbx;
    this.zzdps = str4;
    this.zzdpt = zzi;
}
/**
 * Serializes this object with the SafeParcel format.
 *
 * The numeric field IDs form the wire contract with the CREATOR / the
 * IBinder constructor above and must never be renumbered. Non-parcelable
 * interface fields are marshalled as IBinders via ObjectWrapper.
 * NOTE(review): IDs 1 and 15 are skipped — presumably a version slot and a
 * removed field; confirm against the matching CREATOR before reusing them.
 */
public final void writeToParcel(Parcel parcel, int i) {
    int beginObjectHeader = SafeParcelWriter.beginObjectHeader(parcel);
    SafeParcelWriter.writeParcelable(parcel, 2, this.zzdpl, i, false);
    SafeParcelWriter.writeIBinder(parcel, 3, ObjectWrapper.wrap(this.zzcgv).asBinder(), false);
    SafeParcelWriter.writeIBinder(parcel, 4, ObjectWrapper.wrap(this.zzdpm).asBinder(), false);
    SafeParcelWriter.writeIBinder(parcel, 5, ObjectWrapper.wrap(this.zzdgy).asBinder(), false);
    SafeParcelWriter.writeIBinder(parcel, 6, ObjectWrapper.wrap(this.zzdeq).asBinder(), false);
    SafeParcelWriter.writeString(parcel, 7, this.zzdpn, false);
    SafeParcelWriter.writeBoolean(parcel, 8, this.zzdpo);
    SafeParcelWriter.writeString(parcel, 9, this.zzdpp, false);
    SafeParcelWriter.writeIBinder(parcel, 10, ObjectWrapper.wrap(this.zzdpq).asBinder(), false);
    SafeParcelWriter.writeInt(parcel, 11, this.orientation);
    SafeParcelWriter.writeInt(parcel, 12, this.zzdpr);
    SafeParcelWriter.writeString(parcel, 13, this.url, false);
    SafeParcelWriter.writeParcelable(parcel, 14, this.zzbpe, i, false);
    SafeParcelWriter.writeString(parcel, 16, this.zzdps, false);
    SafeParcelWriter.writeParcelable(parcel, 17, this.zzdpt, i, false);
    // zzdep lives at ID 18, out of visual order — it was appended after the
    // original field layout (it is read from iBinder6 when unparceling).
    SafeParcelWriter.writeIBinder(parcel, 18, ObjectWrapper.wrap(this.zzdep).asBinder(), false);
    SafeParcelWriter.finishObjectHeader(parcel, beginObjectHeader);
}
}
|
ronistone/SpaceInvaders | core/src/com/space/invaders/models/Touchable.java | package com.space.invaders.models;
/**
 * Contract for on-screen elements that react to touch input.
 */
public interface Touchable {
    /** Returns true when the point (x, y) hits this element. */
    boolean isTouch(float x, float y);

    /** Executes this element's action (typically after a successful hit test). */
    void doAction();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.