repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
Lokideos/hero_rider | app/services/watcher/correct_game_trophies_service.rb | <reponame>Lokideos/hero_rider
# frozen_string_literal: true
module Watcher
  # Reconciles a game's local trophy records against the PSN trophy service:
  # fetches the game-wide trophy list and the player-specific trophy details,
  # merges them by 'trophyId', and persists every trophy whose id appears in
  # +new_trophy_ids+, attaching it to the game.
  class CorrectGameTrophiesService
    prepend BasicService

    option :player          # player whose trophy details are fetched
    option :token           # PSN auth token, passed to both API calls
    option :game            # game whose trophies are being corrected
    option :new_trophy_ids  # trophy-service ids that need local records
    option :client, default: proc {
      PsnService::V2::HttpClient.new(url: Settings.psn.v2.trophies.url)
    }

    def call
      game_trophies_list = @client.request_game_trophy_list(
        token: @token, game_id: @game.trophy_service_id,
        trophy_service_source: @game.trophy_service_source
      )
      additional_trophies_info = @client.request_game_player_trophies(
        user_id: @player.trophy_user_id, token: @token, game_id: @game.trophy_service_id,
        trophy_service_source: @game.trophy_service_source
      )
      trophies_list = merge_trophies(game_trophies_list, additional_trophies_info)
      new_trophies = trophies_list.select do |trophy|
        @new_trophy_ids.include? trophy['trophyId']
      end

      Workers::ProcessProgressesUpdate.perform_async(@game.id)

      new_trophies.each do |trophy|
        @game.add_trophy(Trophy.create(trophy_name: trophy['trophyName'],
                                       trophy_service_id: trophy['trophyId'],
                                       trophy_description: trophy['trophyDetail'],
                                       trophy_type: trophy['trophyType'],
                                       trophy_icon_url: trophy['trophyIconUrl'],
                                       trophy_small_icon_url: trophy['trophyIconUrl'],
                                       trophy_earned_rate: trophy['trophyEarnedRate'],
                                       trophy_rare: trophy['trophyRare']))
      end
    end

    private

    # Enriches each game trophy hash with the matching player-specific info.
    # Fix: the original called trophy.merge(find_result) unconditionally, and
    # Hash#merge raises TypeError when given nil — i.e. whenever the player
    # info list has no entry for a trophyId. Such trophies are now kept as-is.
    def merge_trophies(game_trophies, trophies_info)
      game_trophies.map do |trophy|
        extra = trophies_info.find { |trophy_info| trophy_info['trophyId'] == trophy['trophyId'] }
        extra ? trophy.merge(extra) : trophy
      end
    end
  end
end
|
UCLA-SEAL/JShrink | code/jshrink/jshrink-lib/src/test/resources/junit4/src/test/java/org/junit/tests/experimental/theories/runner/TypeMatchingBetweenMultiDataPointsMethod.java | package org.junit.tests.experimental.theories.runner;
import static org.junit.Assert.assertThat;
import static org.junit.experimental.results.PrintableResult.testResult;
import static org.junit.experimental.results.ResultMatchers.isSuccessful;
import org.junit.Test;
import org.junit.experimental.theories.DataPoint;
import org.junit.experimental.theories.DataPoints;
import org.junit.experimental.theories.Theories;
import org.junit.experimental.theories.Theory;
import org.junit.runner.RunWith;
/**
 * Exercises how the Theories runner matches array-returning @DataPoints
 * methods against array-typed theory parameters: a {@code String[]}-returning
 * @DataPoints method supplies individual String values (its elements), so it
 * must not be fed to a {@code String[]} parameter, while a
 * {@code String[][]}-returning one supplies {@code String[]} values and must.
 */
public class TypeMatchingBetweenMultiDataPointsMethod {
    @RunWith(Theories.class)
    public static class WithWrongfullyTypedDataPointsMethod {
        // A field data point of type String[] is itself a String[] value.
        @DataPoint
        public static String[] correctlyTyped = {"Good", "Morning"};

        // Method data points contribute their *elements* (Strings here),
        // which do not match the String[] theory parameter below — the
        // runner is expected to ignore them rather than fail.
        @DataPoints
        public static String[] wrongfullyTyped() {
            return new String[]{"Hello", "World"};
        }

        @Theory
        public void testTheory(String[] array) {
        }
    }

    @Test
    public void ignoreWrongTypedDataPointsMethod() {
        assertThat(testResult(WithWrongfullyTypedDataPointsMethod.class), isSuccessful());
    }

    @RunWith(Theories.class)
    public static class WithCorrectlyTypedDataPointsMethod {
        @DataPoint
        public static String[] correctlyTyped = {"Good", "Morning"};

        // String[][] elements are String[] values — a valid supply for the
        // String[] theory parameter.
        @DataPoints
        public static String[][] anotherCorrectlyTyped() {
            return new String[][]{
                    {"Hello", "World"}
            };
        }

        @Theory
        public void testTheory(String[] array) {
        }
    }

    @Test
    public void pickUpMultiPointDataPointMethods() throws Exception {
        assertThat(testResult(WithCorrectlyTypedDataPointsMethod.class), isSuccessful());
    }
}
|
SoftmedTanzania/opensrp-client-chw | opensrp-chw/src/main/java/org/smartregister/chw/activity/HivIndexContactProfileActivity.java | package org.smartregister.chw.activity;
import static org.smartregister.chw.hiv.util.Constants.ActivityPayload.HIV_MEMBER_OBJECT;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.util.Pair;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.LinearLayout;
import com.vijay.jsonwizard.utils.FormUtils;
import org.apache.commons.lang3.StringUtils;
import org.json.JSONException;
import org.json.JSONObject;
import org.smartregister.chw.BuildConfig;
import org.smartregister.chw.R;
import org.smartregister.chw.application.ChwApplication;
import org.smartregister.chw.core.activity.CoreHivIndexContactProfileActivity;
import org.smartregister.chw.core.adapter.NotificationListAdapter;
import org.smartregister.chw.core.contract.FamilyProfileExtendedContract;
import org.smartregister.chw.core.interactor.CoreHivIndexContactProfileInteractor;
import org.smartregister.chw.core.listener.OnClickFloatingMenu;
import org.smartregister.chw.core.listener.OnRetrieveNotifications;
import org.smartregister.chw.core.utils.ChwNotificationUtil;
import org.smartregister.chw.core.utils.CoreConstants;
import org.smartregister.chw.custom_view.HivIndexContactFloatingMenu;
import org.smartregister.chw.hiv.activity.BaseHivFormsActivity;
import org.smartregister.chw.hiv.dao.HivDao;
import org.smartregister.chw.hiv.dao.HivIndexDao;
import org.smartregister.chw.hiv.domain.HivIndexContactObject;
import org.smartregister.chw.model.ReferralTypeModel;
import org.smartregister.chw.presenter.HivIndexContactProfilePresenter;
import org.smartregister.chw.tb.util.Constants;
import org.smartregister.commonregistry.CommonPersonObjectClient;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import timber.log.Timber;
/**
 * Profile screen for an HIV index contact. Wires the core profile activity to
 * the CHW-specific presenter, floating action menu (call / refer-to-facility),
 * follow-up visit form and referral notification list.
 */
public class HivIndexContactProfileActivity extends CoreHivIndexContactProfileActivity implements FamilyProfileExtendedContract.PresenterCallBack, OnRetrieveNotifications {

    /** Intent-extra flag set by downstream forms when the contact was registered to the HIV registry. */
    public final static String REGISTERED_TO_HIV_REGISTRY = "registered_to_hiv_registry";

    private CommonPersonObjectClient commonPersonObjectClient;
    private List<ReferralTypeModel> referralTypeModels = new ArrayList<>();
    private NotificationListAdapter notificationListAdapter = new NotificationListAdapter();

    /** Launches this profile screen for the given index contact. */
    public static void startHivIndexContactProfileActivity(Activity activity, HivIndexContactObject hivIndexContactObject) {
        Intent intent = new Intent(activity, HivIndexContactProfileActivity.class);
        intent.putExtra(HIV_MEMBER_OBJECT, hivIndexContactObject);
        activity.startActivity(intent);
    }

    /**
     * Opens the follow-up visit form for the contact identified by
     * {@code baseEntityID} and waits for the result (home-visit request code).
     *
     * @throws JSONException if the follow-up form cannot be read/serialised
     */
    public static void startHivIndexContactFollowupActivity(Activity activity, String baseEntityID) throws JSONException {
        Intent intent = new Intent(activity, BaseHivFormsActivity.class);
        intent.putExtra(org.smartregister.chw.hiv.util.Constants.ActivityPayload.BASE_ENTITY_ID, baseEntityID);
        // Note: the original also fetched HivIndexDao.getMember(baseEntityID)
        // here but never used the result; the dead read has been removed.
        JSONObject form = (new FormUtils()).getFormJsonFromRepositoryOrAssets(activity, CoreConstants.JSON_FORM.getHivIndexContactFollowupVisit());
        intent.putExtra(org.smartregister.chw.hiv.util.Constants.ActivityPayload.JSON_FORM, form.toString());
        intent.putExtra(org.smartregister.chw.hiv.util.Constants.ActivityPayload.ACTION, Constants.ActivityPayloadType.FOLLOW_UP_VISIT);
        intent.putExtra(org.smartregister.chw.hiv.util.Constants.ActivityPayload.USE_DEFAULT_NEAT_FORM_LAYOUT, false);
        activity.startActivityForResult(intent, org.smartregister.chw.anc.util.Constants.REQUEST_CODE_HOME_VISIT);
    }

    @Override
    public void setupViews() {
        super.setupViews();
        // TODO(review): this branch is intentionally empty — either fill in
        // the followed-up-by-CHW view tweaks or remove the check.
        if (getHivIndexContactObject().getFollowedUpByChw()) {
        }
    }

    @Override
    protected void onCreation() {
        super.onCreation();
        setCommonPersonObjectClient(getClientDetailsByBaseEntityID(getHivIndexContactObject().getBaseEntityId()));
        addHivReferralTypes();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        notificationAndReferralRecyclerView.setAdapter(notificationListAdapter);
        notificationListAdapter.setOnClickListener(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        notificationListAdapter.canOpen = true;
        ChwNotificationUtil.retrieveNotifications(ChwApplication.getApplicationFlavor().hasReferrals(),
                getHivIndexContactObject().getBaseEntityId(), this);
    }

    public CommonPersonObjectClient getCommonPersonObjectClient() {
        return commonPersonObjectClient;
    }

    public void setCommonPersonObjectClient(CommonPersonObjectClient commonPersonObjectClient) {
        this.commonPersonObjectClient = commonPersonObjectClient;
    }

    @Override
    protected void initializePresenter() {
        showProgressBar(true);
        setHivContactProfilePresenter(new HivIndexContactProfilePresenter(this, new CoreHivIndexContactProfileInteractor(), getHivIndexContactObject()));
        fetchProfileData();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int itemId = item.getItemId();
        try {
            if (itemId == R.id.action_issue_hiv_community_followup_referral) {
                HivRegisterActivity.startHIVFormActivity(this, getHivIndexContactObject().getBaseEntityId(), CoreConstants.JSON_FORM.getHivIndexContactCommunityFollowupReferral(), (new FormUtils()).getFormJsonFromRepositoryOrAssets(this, CoreConstants.JSON_FORM.getHivIndexContactCommunityFollowupReferral()).toString());
                return true;
            }
        } catch (JSONException e) {
            Timber.e(e);
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(org.smartregister.chw.core.R.menu.hiv_profile_menu, menu);
        return true;
    }

    @Override
    public void openFollowUpVisitForm(boolean isEdit) {
        // Only fresh follow-up visits are supported from this screen.
        if (!isEdit) {
            try {
                startHivIndexContactFollowupActivity(this, getHivIndexContactObject().getBaseEntityId());
            } catch (JSONException e) {
                Timber.e(e);
            }
        }
    }

    @Override
    protected void removeMember() {
        // Not required for HF (as seen in other profile activities)?
    }

    @Override
    public void onClick(View view) {
        super.onClick(view);
        int id = view.getId();
        if (id == R.id.record_hiv_followup_visit) {
            openFollowUpVisitForm(false);
        }
    }

    /** Registers the referral types offered on this profile (unified-referral builds only). */
    private void addHivReferralTypes() {
        if (BuildConfig.USE_UNIFIED_REFERRAL_APPROACH) {
            referralTypeModels.add(new ReferralTypeModel(getString(R.string.hts_referral),
                    CoreConstants.JSON_FORM.getHtsReferralForm(), CoreConstants.TASKS_FOCUS.SUSPECTED_HIV));
//            referralTypeModels.add(new ReferralTypeModel(getString(R.string.gbv_referral),
//                    CoreConstants.JSON_FORM.getGbvReferralForm(), CoreConstants.TASKS_FOCUS.SUSPECTED_GBV));
        }
    }

    public List<ReferralTypeModel> getReferralTypeModels() {
        return referralTypeModels;
    }

    @Override
    public void initializeCallFAB() {
        setHivFloatingMenu(new HivIndexContactFloatingMenu(this, getHivIndexContactObject()));
        OnClickFloatingMenu onClickFloatingMenu = viewId -> {
            switch (viewId) {
                case R.id.hiv_fab:
                    checkPhoneNumberProvided();
                    ((HivIndexContactFloatingMenu) getHivFloatingMenu()).animateFAB();
                    break;
                case R.id.call_layout:
                    ((HivIndexContactFloatingMenu) getHivFloatingMenu()).launchCallWidget();
                    ((HivIndexContactFloatingMenu) getHivFloatingMenu()).animateFAB();
                    break;
                case R.id.refer_to_facility_layout:
                    ((HivIndexContactProfilePresenter) getHivContactProfilePresenter()).referToFacility();
                    break;
                default:
                    Timber.d("Unknown fab action");
                    break;
            }
        };
        ((HivIndexContactFloatingMenu) getHivFloatingMenu()).setFloatMenuClickListener(onClickFloatingMenu);
        getHivFloatingMenu().setGravity(Gravity.BOTTOM | Gravity.END);
        LinearLayout.LayoutParams linearLayoutParams = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT,
                LinearLayout.LayoutParams.MATCH_PARENT);
        addContentView(getHivFloatingMenu(), linearLayoutParams);
    }

    /** Enables/disables the call option depending on whether the contact has a phone number. */
    private void checkPhoneNumberProvided() {
        boolean phoneNumberAvailable = (StringUtils.isNotBlank(getHivIndexContactObject().getPhoneNumber()));
        ((HivIndexContactFloatingMenu) getHivFloatingMenu()).redraw(phoneNumberAvailable);
    }

    @Override
    public Context getContext() {
        return HivIndexContactProfileActivity.this;
    }

    @Override
    public void verifyHasPhone() {
        // Implement
    }

    @Override
    public void notifyHasPhone(boolean b) {
        // Implement
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Fix: guard the null payload explicitly. The original only checked
        // resultCode and relied on the catch-all below to swallow the NPE
        // thrown by data.getBooleanExtra(...) when data was null.
        if (resultCode != Activity.RESULT_OK || data == null) return;
        try {
            boolean savedToHivRegistry = data.getBooleanExtra(REGISTERED_TO_HIV_REGISTRY, false);
            if (savedToHivRegistry) {
                // Contact was promoted into the HIV registry — switch to the
                // full HIV profile and close this screen.
                HivProfileActivity.startHivProfileActivity(this, Objects.requireNonNull(HivDao.getMember(getHivIndexContactObject().getBaseEntityId())));
                finish();
            } else {
                // Reload the contact and refresh the presenter/profile data.
                setHivIndexContactObject(HivIndexDao.getMember(getHivIndexContactObject().getBaseEntityId()));
                initializePresenter();
                fetchProfileData();
            }
        } catch (Exception e) {
            Timber.e(e);
        }
    }

    @Override
    public void setFollowUpButtonDue() {
        super.setFollowUpButtonDue();
        // Hide the button once the CHW has already followed up this contact.
        showFollowUpVisitButton(!getHivIndexContactObject().getFollowedUpByChw());
    }

    @Override
    public void onReceivedNotifications(List<Pair<String, String>> list) {
    }
}
|
Sher-Chowdhury/javascript_study_guide | 13_asynchronous_programming/02_setTimeout3.js | <gh_stars>0
'use strict'

// the previous example can be rewritten like this:
console.log("hello 1")

// this time we're passing in a third and fourth argument into the setTimeout function.
// Arguments after the delay are forwarded to the callback, so "hello" and "2"
// become msg/number when the timer fires ~3s later — after "hello 3" prints,
// since setTimeout only schedules the callback and returns immediately.
// https://developer.mozilla.org/en-US/docs/Web/API/setTimeout
setTimeout((msg, number) => console.log(msg, number), 3000, "hello", "2")

console.log("hello 3")
|
Angel1612/Computer_Science_UNSA | Seguridad en Computacion/Laboratorio 1/4.cpp | #include<iostream>
#include<string.h>
#include<stdio.h>
#include <locale.h>
#include <wchar.h>
#include <stdlib.h>
#include <fstream>
using namespace std;
// Reads text from stdin, strips unwanted punctuation/space bytes and writes
// the cleaned lines to HERALDOSNEGROS_pre.txt (pre-processing step for a
// cipher exercise).
//
// Fixes over the original:
//  * the unwanted-character set was a char array WITHOUT a '\0' terminator,
//    so strlen() on it read past the array (undefined behaviour);
//  * '¡' is a multibyte character and does not fit in a single char
//    (implementation-defined multi-character constant); as part of a string
//    literal its bytes are filtered correctly byte-by-byte;
//  * strlen(line.c_str()) was re-evaluated every loop iteration (O(n^2)).
int main() {
    std::string line;
    std::setlocale(LC_CTYPE, "Spanish");

    // Bytes to remove. Stored as a std::string so lookup via find() needs no
    // manual length bookkeeping. "¡" contributes its (multibyte) encoding,
    // so both of its bytes are stripped from the input.
    const std::string unwanted = "\u00A1!; .,";

    std::ofstream file("HERALDOSNEGROS_pre.txt");
    while (std::getline(std::cin, line)) {
        for (char c : line) {
            // Keep only bytes not present in the unwanted set.
            if (unwanted.find(c) == std::string::npos)
                file << c;
        }
        file << '\n';
    }
    file.close();
    return 0;
}
OpenMPDK/SMDK | lib/linux-5.18-rc3-smdk/drivers/net/ethernet/netronome/nfp/nfd3/xsk.c | // SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)
/* Copyright (C) 2018 Netronome Systems, Inc */
/* Copyright (C) 2021 Corigine, Inc */
#include <linux/bpf_trace.h>
#include <linux/netdevice.h>
#include "../nfp_app.h"
#include "../nfp_net.h"
#include "../nfp_net_dp.h"
#include "../nfp_net_xsk.h"
#include "nfd3.h"
/* Transmit an AF_XDP RX buffer back out on the XDP TX ring (XDP_TX verdict).
 * Returns false when the TX ring has no free descriptor (caller then drops
 * the frame); on success ownership of the XSK buffer moves to the TX ring,
 * with txbuf->is_xsk_tx marking it for recycling on TX completion.
 */
static bool
nfp_nfd3_xsk_tx_xdp(const struct nfp_net_dp *dp, struct nfp_net_r_vector *r_vec,
		    struct nfp_net_rx_ring *rx_ring,
		    struct nfp_net_tx_ring *tx_ring,
		    struct nfp_net_xsk_rx_buf *xrxbuf, unsigned int pkt_len,
		    int pkt_off)
{
	struct xsk_buff_pool *pool = r_vec->xsk_pool;
	struct nfp_nfd3_tx_buf *txbuf;
	struct nfp_nfd3_tx_desc *txd;
	unsigned int wr_idx;

	if (nfp_net_tx_space(tx_ring) < 1)
		return false;

	/* Make CPU writes to the payload visible to the device before DMA. */
	xsk_buff_raw_dma_sync_for_device(pool, xrxbuf->dma_addr + pkt_off,
					 pkt_len);

	wr_idx = D_IDX(tx_ring, tx_ring->wr_p);

	txbuf = &tx_ring->txbufs[wr_idx];
	txbuf->xdp = xrxbuf->xdp;
	txbuf->real_len = pkt_len;
	txbuf->is_xsk_tx = true;

	/* Build TX descriptor */
	txd = &tx_ring->txds[wr_idx];
	txd->offset_eop = NFD3_DESC_TX_EOP;
	txd->dma_len = cpu_to_le16(pkt_len);
	nfp_desc_set_dma_addr(txd, xrxbuf->dma_addr + pkt_off);
	txd->data_len = cpu_to_le16(pkt_len);

	/* Single-fragment frame: no offloads (csum/LSO) are requested. */
	txd->flags = 0;
	txd->mss = 0;
	txd->lso_hdrlen = 0;

	tx_ring->wr_ptr_add++;
	tx_ring->wr_p++;

	return true;
}
/* Copy an AF_XDP RX buffer into a freshly allocated skb and hand it to the
 * network stack (XDP_PASS path, also used for representor traffic). The XSK
 * buffer is returned to the pool afterwards. Drops and counts the packet if
 * no netdev can be resolved for the port id or skb allocation fails.
 */
static void nfp_nfd3_xsk_rx_skb(struct nfp_net_rx_ring *rx_ring,
				const struct nfp_net_rx_desc *rxd,
				struct nfp_net_xsk_rx_buf *xrxbuf,
				const struct nfp_meta_parsed *meta,
				unsigned int pkt_len,
				bool meta_xdp,
				unsigned int *skbs_polled)
{
	struct nfp_net_r_vector *r_vec = rx_ring->r_vec;
	struct nfp_net_dp *dp = &r_vec->nfp_net->dp;
	struct net_device *netdev;
	struct sk_buff *skb;

	if (likely(!meta->portid)) {
		netdev = dp->netdev;
	} else {
		struct nfp_net *nn = netdev_priv(dp->netdev);

		/* Frame belongs to a port representor; resolve its netdev. */
		netdev = nfp_app_dev_get(nn->app, meta->portid, NULL);
		if (unlikely(!netdev)) {
			nfp_net_xsk_rx_drop(r_vec, xrxbuf);
			return;
		}
		nfp_repr_inc_rx_stats(netdev, pkt_len);
	}

	skb = napi_alloc_skb(&r_vec->napi, pkt_len);
	if (!skb) {
		nfp_net_xsk_rx_drop(r_vec, xrxbuf);
		return;
	}
	memcpy(skb_put(skb, pkt_len), xrxbuf->xdp->data, pkt_len);

	skb->mark = meta->mark;
	skb_set_hash(skb, meta->hash, meta->hash_type);

	skb_record_rx_queue(skb, rx_ring->idx);
	skb->protocol = eth_type_trans(skb, netdev);

	nfp_nfd3_rx_csum(dp, r_vec, rxd, meta, skb);

	if (rxd->rxd.flags & PCIE_DESC_RX_VLAN)
		__vlan_hwaccel_put_tag(skb, htons(ETH_P_8021Q),
				       le16_to_cpu(rxd->rxd.vlan));

	/* Preserve XDP metadata region set by the program, if any. */
	if (meta_xdp)
		skb_metadata_set(skb,
				 xrxbuf->xdp->data - xrxbuf->xdp->data_meta);

	napi_gro_receive(&rx_ring->r_vec->napi, skb);

	nfp_net_xsk_rx_free(xrxbuf);

	(*skbs_polled)++;
}
/* AF_XDP RX poll loop: consume up to @budget RX descriptors, parse prepended
 * metadata, run the attached XDP program and dispatch each frame according
 * to its verdict (PASS -> skb, TX -> XDP TX ring, REDIRECT -> xdp_do_redirect,
 * else drop). Refills the XSK freelist and flushes pending redirects/TX on
 * exit. Returns the number of descriptors processed; *skbs_polled counts
 * frames handed to the stack as skbs.
 */
static unsigned int
nfp_nfd3_xsk_rx(struct nfp_net_rx_ring *rx_ring, int budget,
		unsigned int *skbs_polled)
{
	struct nfp_net_r_vector *r_vec = rx_ring->r_vec;
	struct nfp_net_dp *dp = &r_vec->nfp_net->dp;
	struct nfp_net_tx_ring *tx_ring;
	struct bpf_prog *xdp_prog;
	bool xdp_redir = false;
	int pkts_polled = 0;

	xdp_prog = READ_ONCE(dp->xdp_prog);
	tx_ring = r_vec->xdp_ring;

	while (pkts_polled < budget) {
		unsigned int meta_len, data_len, pkt_len, pkt_off;
		struct nfp_net_xsk_rx_buf *xrxbuf;
		struct nfp_net_rx_desc *rxd;
		struct nfp_meta_parsed meta;
		int idx, act;

		idx = D_IDX(rx_ring, rx_ring->rd_p);
		rxd = &rx_ring->rxds[idx];
		/* DD bit set by the device marks a completed descriptor. */
		if (!(rxd->rxd.meta_len_dd & PCIE_DESC_RX_DD))
			break;

		rx_ring->rd_p++;
		pkts_polled++;

		xrxbuf = &rx_ring->xsk_rxbufs[idx];

		/* If starved of buffers "drop" it and scream. */
		if (rx_ring->rd_p >= rx_ring->wr_p) {
			nn_dp_warn(dp, "Starved of RX buffers\n");
			nfp_net_xsk_rx_drop(r_vec, xrxbuf);
			break;
		}

		/* Memory barrier to ensure that we won't do other reads
		 * before the DD bit.
		 */
		dma_rmb();

		memset(&meta, 0, sizeof(meta));

		/* Only supporting AF_XDP with dynamic metadata so buffer layout
		 * is always:
		 *
		 * ---------------------------------------------------------
		 * |  off | metadata  |             packet           | XXXX |
		 * ---------------------------------------------------------
		 */
		meta_len = rxd->rxd.meta_len_dd & PCIE_DESC_RX_META_LEN_MASK;
		data_len = le16_to_cpu(rxd->rxd.data_len);
		pkt_len = data_len - meta_len;

		if (unlikely(meta_len > NFP_NET_MAX_PREPEND)) {
			nn_dp_warn(dp, "Oversized RX packet metadata %u\n",
				   meta_len);
			nfp_net_xsk_rx_drop(r_vec, xrxbuf);
			continue;
		}

		/* Stats update. */
		u64_stats_update_begin(&r_vec->rx_sync);
		r_vec->rx_pkts++;
		r_vec->rx_bytes += pkt_len;
		u64_stats_update_end(&r_vec->rx_sync);

		/* Advance past the metadata prepend before running XDP. */
		xrxbuf->xdp->data += meta_len;
		xrxbuf->xdp->data_end = xrxbuf->xdp->data + pkt_len;
		xdp_set_data_meta_invalid(xrxbuf->xdp);
		xsk_buff_dma_sync_for_cpu(xrxbuf->xdp, r_vec->xsk_pool);
		net_prefetch(xrxbuf->xdp->data);

		if (meta_len) {
			if (unlikely(nfp_nfd3_parse_meta(dp->netdev, &meta,
							 xrxbuf->xdp->data -
							 meta_len,
							 xrxbuf->xdp->data,
							 pkt_len, meta_len))) {
				nn_dp_warn(dp, "Invalid RX packet metadata\n");
				nfp_net_xsk_rx_drop(r_vec, xrxbuf);
				continue;
			}

			if (unlikely(meta.portid)) {
				struct nfp_net *nn = netdev_priv(dp->netdev);

				/* Representor traffic bypasses the XDP
				 * program; control frames go straight to
				 * the app layer.
				 */
				if (meta.portid != NFP_META_PORT_ID_CTRL) {
					nfp_nfd3_xsk_rx_skb(rx_ring, rxd,
							    xrxbuf, &meta,
							    pkt_len, false,
							    skbs_polled);
					continue;
				}

				nfp_app_ctrl_rx_raw(nn->app, xrxbuf->xdp->data,
						    pkt_len);
				nfp_net_xsk_rx_free(xrxbuf);
				continue;
			}
		}

		act = bpf_prog_run_xdp(xdp_prog, xrxbuf->xdp);

		/* The program may have adjusted head/tail; recompute. */
		pkt_len = xrxbuf->xdp->data_end - xrxbuf->xdp->data;
		pkt_off = xrxbuf->xdp->data - xrxbuf->xdp->data_hard_start;

		switch (act) {
		case XDP_PASS:
			nfp_nfd3_xsk_rx_skb(rx_ring, rxd, xrxbuf, &meta, pkt_len,
					    true, skbs_polled);
			break;
		case XDP_TX:
			if (!nfp_nfd3_xsk_tx_xdp(dp, r_vec, rx_ring, tx_ring,
						 xrxbuf, pkt_len, pkt_off))
				nfp_net_xsk_rx_drop(r_vec, xrxbuf);
			else
				nfp_net_xsk_rx_unstash(xrxbuf);
			break;
		case XDP_REDIRECT:
			if (xdp_do_redirect(dp->netdev, xrxbuf->xdp, xdp_prog)) {
				nfp_net_xsk_rx_drop(r_vec, xrxbuf);
			} else {
				nfp_net_xsk_rx_unstash(xrxbuf);
				xdp_redir = true;
			}
			break;
		default:
			bpf_warn_invalid_xdp_action(dp->netdev, xdp_prog, act);
			fallthrough;
		case XDP_ABORTED:
			trace_xdp_exception(dp->netdev, xdp_prog, act);
			fallthrough;
		case XDP_DROP:
			nfp_net_xsk_rx_drop(r_vec, xrxbuf);
			break;
		}
	}

	nfp_net_xsk_rx_ring_fill_freelist(r_vec->rx_ring);

	if (xdp_redir)
		xdp_do_flush_map();

	if (tx_ring->wr_ptr_add)
		nfp_net_tx_xmit_more_flush(tx_ring);

	return pkts_polled;
}
/* Release the XSK buffer attached to a TX slot and clear the bookkeeping
 * so the slot can be reused (used for completed/torn-down XDP_TX frames).
 */
void nfp_nfd3_xsk_tx_free(struct nfp_nfd3_tx_buf *txbuf)
{
	xsk_buff_free(txbuf->xdp);

	txbuf->dma_addr = 0;
	txbuf->xdp = NULL;
}
/* Reclaim completed descriptors on the XDP/XSK TX ring, capped at
 * NFP_NET_XDP_MAX_COMPLETE per call. Buffers that came from XDP_TX
 * (is_xsk_tx) are freed back to the pool and excluded from the userspace
 * completion count passed to xsk_tx_completed(). Returns true when all
 * outstanding completions were processed within the cap.
 */
static bool nfp_nfd3_xsk_complete(struct nfp_net_tx_ring *tx_ring)
{
	struct nfp_net_r_vector *r_vec = tx_ring->r_vec;
	u32 done_pkts = 0, done_bytes = 0, reused = 0;
	bool done_all;
	int idx, todo;
	u32 qcp_rd_p;

	if (tx_ring->wr_p == tx_ring->rd_p)
		return true;

	/* Work out how many descriptors have been transmitted. */
	qcp_rd_p = nfp_qcp_rd_ptr_read(tx_ring->qcp_q);

	if (qcp_rd_p == tx_ring->qcp_rd_p)
		return true;

	todo = D_IDX(tx_ring, qcp_rd_p - tx_ring->qcp_rd_p);

	done_all = todo <= NFP_NET_XDP_MAX_COMPLETE;
	todo = min(todo, NFP_NET_XDP_MAX_COMPLETE);

	tx_ring->qcp_rd_p = D_IDX(tx_ring, tx_ring->qcp_rd_p + todo);

	done_pkts = todo;
	while (todo--) {
		struct nfp_nfd3_tx_buf *txbuf;

		idx = D_IDX(tx_ring, tx_ring->rd_p);
		tx_ring->rd_p++;

		txbuf = &tx_ring->txbufs[idx];
		if (unlikely(!txbuf->real_len))
			continue;

		done_bytes += txbuf->real_len;
		txbuf->real_len = 0;

		/* XDP_TX frames own their buffer; recycle rather than
		 * reporting completion to the AF_XDP socket.
		 */
		if (txbuf->is_xsk_tx) {
			nfp_nfd3_xsk_tx_free(txbuf);
			reused++;
		}
	}

	u64_stats_update_begin(&r_vec->tx_sync);
	r_vec->tx_bytes += done_bytes;
	r_vec->tx_pkts += done_pkts;
	u64_stats_update_end(&r_vec->tx_sync);

	xsk_tx_completed(r_vec->xsk_pool, done_pkts - reused);

	WARN_ONCE(tx_ring->wr_p - tx_ring->rd_p > tx_ring->cnt,
		  "XDP TX ring corruption rd_p=%u wr_p=%u cnt=%u\n",
		  tx_ring->rd_p, tx_ring->wr_p, tx_ring->cnt);

	return done_all;
}
/* Drain the AF_XDP socket TX queue onto the hardware ring in batches of
 * NFP_NET_XSK_TX_BATCH, as long as ring space allows. Descriptors are
 * DMA-synced and built in a second pass over each batch; the device write
 * pointer is bumped once at the end, after a wmb() makes all descriptor
 * writes visible.
 */
static void nfp_nfd3_xsk_tx(struct nfp_net_tx_ring *tx_ring)
{
	struct nfp_net_r_vector *r_vec = tx_ring->r_vec;
	struct xdp_desc desc[NFP_NET_XSK_TX_BATCH];
	struct xsk_buff_pool *xsk_pool;
	struct nfp_nfd3_tx_desc *txd;
	u32 pkts = 0, wr_idx;
	u32 i, got;

	xsk_pool = r_vec->xsk_pool;

	while (nfp_net_tx_space(tx_ring) >= NFP_NET_XSK_TX_BATCH) {
		for (i = 0; i < NFP_NET_XSK_TX_BATCH; i++)
			if (!xsk_tx_peek_desc(xsk_pool, &desc[i]))
				break;
		got = i;
		if (!got)
			break;

		wr_idx = D_IDX(tx_ring, tx_ring->wr_p + i);
		prefetchw(&tx_ring->txds[wr_idx]);

		for (i = 0; i < got; i++)
			xsk_buff_raw_dma_sync_for_device(xsk_pool, desc[i].addr,
							 desc[i].len);

		for (i = 0; i < got; i++) {
			wr_idx = D_IDX(tx_ring, tx_ring->wr_p + i);

			tx_ring->txbufs[wr_idx].real_len = desc[i].len;
			/* Socket-originated frame (not XDP_TX recycling). */
			tx_ring->txbufs[wr_idx].is_xsk_tx = false;

			/* Build TX descriptor. */
			txd = &tx_ring->txds[wr_idx];
			nfp_desc_set_dma_addr(txd,
					      xsk_buff_raw_get_dma(xsk_pool,
								   desc[i].addr
								   ));
			txd->offset_eop = NFD3_DESC_TX_EOP;
			txd->dma_len = cpu_to_le16(desc[i].len);
			txd->data_len = cpu_to_le16(desc[i].len);
		}

		tx_ring->wr_p += got;
		pkts += got;
	}

	if (!pkts)
		return;

	xsk_tx_release(xsk_pool);
	/* Ensure all records are visible before incrementing write counter. */
	wmb();
	nfp_qcp_wr_ptr_add(tx_ring->qcp_q, pkts);
}
/* NAPI poll handler for AF_XDP-enabled vectors: process RX, then TX
 * completions and socket TX. Stays scheduled (returns full budget) while
 * RX used the whole budget or XDP TX completions are still outstanding;
 * otherwise completes NAPI and re-enables the interrupt.
 */
int nfp_nfd3_xsk_poll(struct napi_struct *napi, int budget)
{
	struct nfp_net_r_vector *r_vec =
		container_of(napi, struct nfp_net_r_vector, napi);
	unsigned int pkts_polled, skbs = 0;

	pkts_polled = nfp_nfd3_xsk_rx(r_vec->rx_ring, budget, &skbs);

	if (pkts_polled < budget) {
		if (r_vec->tx_ring)
			nfp_nfd3_tx_complete(r_vec->tx_ring, budget);

		if (!nfp_nfd3_xsk_complete(r_vec->xdp_ring))
			pkts_polled = budget;

		nfp_nfd3_xsk_tx(r_vec->xdp_ring);

		/* napi_complete_done() reports skbs delivered to the stack. */
		if (pkts_polled < budget && napi_complete_done(napi, skbs))
			nfp_net_irq_unmask(r_vec->nfp_net, r_vec->irq_entry);
	}

	return pkts_polled;
}
|
varadeth/DIVOC | backend/test_certificate_signer/config/keys.js | const { KeyType } = require('certificate-signer-library/signer');
const publicKeyPem = process.env.CERTIFICATE_PUBLIC_KEY;
// eslint-disable-next-line max-len
const signingKeyType = process.env.SIGNING_KEY_TYPE || KeyType.RSA;
const privateKeyPem = process.env.CERTIFICATE_PRIVATE_KEY ;
const smsAuthKey = "";
module.exports = {
publicKeyPem,
privateKeyPem,
smsAuthKey,
signingKeyType
};
/*
// openssl genrsa -out key.pem; cat key.pem;
// openssl rsa -in key.pem -pubout -out pubkey.pem;
// cat pubkey.pem; rm key.pem pubkey.pem
*/ |
neopsis/envas-demo | envasDemo-wb/src/main/java/com/neopsis/envas/demo/BEnvasDemo.java | <reponame>neopsis/envas-demo
/*
* @(#)BNvHistoryDemo.java 25.04.2014
*
* Copyright (c) 2007 Neopsis GmbH
*
*
*/
package com.neopsis.envas.demo;
import com.neopsis.envas.BNvApplication;
import com.neopsis.envas.NvUI;
import javax.baja.nre.annotations.NiagaraType;
import javax.baja.sys.Sys;
import javax.baja.sys.Type;
/**
* Demo Application provider
*
*/
@NiagaraType
public class BEnvasDemo extends BNvApplication {

    /*+ ------------ BEGIN BAJA AUTO GENERATED CODE ------------ +*/
    /*@ $com.neopsis.envas.demo.BEnvasDemo(2979906276)1.0$ @*/
    /* Generated Wed May 12 00:48:10 CEST 2021 by Slot-o-Matic (c) Tridium, Inc. 2012 */

    ////////////////////////////////////////////////////////////////
    // Type
    ////////////////////////////////////////////////////////////////

    @Override
    public Type getType() {
        return TYPE;
    }

    public static final Type TYPE = Sys.loadType(BEnvasDemo.class);

    /*+ ------------ END BAJA AUTO GENERATED CODE -------------- +*/

    public BEnvasDemo() {}

    /** Display name of this application as shown by the Envas framework. */
    public String getApplicationName() {
        return "Envas Demo";
    }

    /** Short human-readable description used in application listings. */
    public String getApplicationDescription() {
        return "Envas feature sampler";
    }

    /** Supplies the demo UI shown when the application opens on the desktop. */
    public NvUI getDesktopUI() {
        return new NvDemo();
    }

    /** Theme name applied to the desktop UI. */
    @Override
    public String getDesktopTheme() {
        return "envas";
    }
}
|
nfwGytautas/Triton | TritonCore/src/Triton2/Assert.h | #pragma once
#ifdef TR_ENABLE_ASSERTS
#define TR_ASSERT(x, ...) { if(!(x)) { __debugbreak(); } }
#define TR_CORE_ASSERT(x, ...) { if(!(x)) { __debugbreak(); } }
#define TR_STATIC_ASSERT(x, ...) { static_assert(x, __VA_ARGS__); }
#else
#define TR_ASSERT(x, ...)
#define TR_CORE_ASSERT(x, ...)
#define TR_STATIC_ASSERT(x, ...)
#endif |
ElSaico/vgmtrans | src/ui/qt/MenuBar.cpp | /*
* VGMTrans (c) 2002-2019
* Licensed under the zlib license,
* refer to the included LICENSE.txt file
*/
#include "MenuBar.h"
#include <QActionGroup>
#include <QDockWidget>
#include "Options.h"
#include "Root.h"
// Builds the application menu bar: File, Options (including one toggle
// action per dock widget), and Help menus, in that order.
MenuBar::MenuBar(QWidget *parent, const QList<QDockWidget *> &dockWidgets) : QMenuBar(parent) {
  appendFileMenu();
  appendOptionsMenu(dockWidgets);
  appendInfoMenu();
}
// Populates the File menu: Open (Ctrl+O) and Exit (Alt+F4), separated.
// The triggered signals are forwarded as the MenuBar's own openFile/exit
// signals for the main window to handle.
void MenuBar::appendFileMenu() {
  QMenu *file_dropdown = addMenu("File");
  menu_open_file = file_dropdown->addAction("Open");
  menu_open_file->setShortcut(QKeySequence(QStringLiteral("Ctrl+O")));
  connect(menu_open_file, &QAction::triggered, this, &MenuBar::openFile);

  file_dropdown->addSeparator();

  menu_app_exit = file_dropdown->addAction("Exit");
  menu_app_exit->setShortcut(QKeySequence(QStringLiteral("Alt+F4")));
  connect(menu_app_exit, &QAction::triggered, this, &MenuBar::exit);
}
// Populates the Options menu: an exclusive "Bank select style" action group
// (GS default / MMA) that updates the global ConversionOptions, followed by
// the show/hide toggle action of each dock widget.
void MenuBar::appendOptionsMenu(const QList<QDockWidget *> &dockWidgets) {
  QMenu *options_dropdown = addMenu("Options");

  auto bs = options_dropdown->addMenu("Bank select style");
  // QActionGroup is exclusive by default, so only one style is checked.
  QActionGroup *bs_grp = new QActionGroup(this);

  auto act = bs->addAction("GS (Default)");
  act->setCheckable(true);
  act->setChecked(true);
  bs_grp->addAction(act);

  act = bs->addAction("MMA");
  act->setCheckable(true);
  bs_grp->addAction(act);

  // Dispatch on the action's display text; keep these strings in sync with
  // the addAction() calls above.
  connect(bs_grp, &QActionGroup::triggered, [](QAction *bs_style) {
    if (auto text = bs_style->text(); text == "GS (Default)") {
      ConversionOptions::the().SetBankSelectStyle(BankSelectStyle::GS);
    } else if (text == "MMA") {
      ConversionOptions::the().SetBankSelectStyle(BankSelectStyle::MMA);
      pRoot->UI_AddLogItem(
          new LogItem(L"MMA style (CC0 * 128 + CC32) bank select was chosen and "
                      L"it will be used for bank select events in generated MIDIs. This "
                      L"will cause in-program playback to sound incorrect!",
                      LOG_LEVEL_WARN, L"VGMTransQt"));
    }
  });

  options_dropdown->addSeparator();

  // One visibility toggle per dock widget, provided by Qt.
  for (auto &widget : dockWidgets) {
    options_dropdown->addAction(widget->toggleViewAction());
  }
}
// Populates the Help menu with the About dialog entry, forwarded as the
// showAbout signal.
void MenuBar::appendInfoMenu() {
  QMenu *info_dropdown = addMenu("Help");
  menu_about_dlg = info_dropdown->addAction("About VGMTrans");
  connect(menu_about_dlg, &QAction::triggered, this, &MenuBar::showAbout);
}
|
vinirms/sites-responsivos | site arquiteto/node_modules/@fortawesome/free-solid-svg-icons/faPersonPraying.js | 'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var prefix = 'fas';
var iconName = 'person-praying';
var width = 384;
var height = 512;
var aliases = [128720,"pray"];
var unicode = 'f683';
var svgPathData = 'M255.1 128c35.38 0 63.1-28.62 63.1-64s-28.62-64-63.1-64S191.1 28.62 191.1 64S220.6 128 255.1 128zM225.4 297.8c14 16.75 39 19.12 56.01 5.25l88.01-72c17-14 19.5-39.25 5.625-56.38c-14-17.12-39.25-19.5-56.38-5.625L261.3 216l-39-46.25c-15.38-18.38-39.13-27.88-64.01-25.38c-24.13 2.5-45.25 16.25-56.38 37l-49.38 92C29.13 317 43.88 369.8 86.76 397.1L131.5 432H40C17.88 432 0 449.9 0 472S17.88 512 40 512h208c34.13 0 53.76-42.75 28.25-68.25L166.4 333.9L201.3 269L225.4 297.8z';
exports.definition = {
prefix: prefix,
iconName: iconName,
icon: [
width,
height,
aliases,
unicode,
svgPathData
]};
exports.faPersonPraying = exports.definition;
exports.prefix = prefix;
exports.iconName = iconName;
exports.width = width;
exports.height = height;
exports.ligatures = aliases;
exports.unicode = unicode;
exports.svgPathData = svgPathData;
exports.aliases = aliases; |
xbest/leetcode | src/main/java/com/gmail/imshhui/medium/ValidateBinarySearchTree.java | package com.gmail.imshhui.medium;
import com.gmail.imshhui.bean.TreeNode;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
/**
* Given a binary tree, determine if it is a valid binary search tree (BST).
* <p>
* Assume a BST is defined as follows:
* <p>
* The left subtree of a node contains only nodes with keys less than the node's key.
* The right subtree of a node contains only nodes with keys greater than the node's key.
* Both the left and right subtrees must also be binary search trees.
* <p>
* <p>
* Example 1:
* <p>
* 2
* / \
* 1 3
* <p>
* Input: [2,1,3]
* Output: true
* Example 2:
* <p>
* 5
* / \
* 1 4
* / \
* 3 6
* <p>
* Input: [5,1,4,null,null,3,6]
* Output: false
* Explanation: The root node's value is 5 but its right child's value is 4.
*
* @see <a href="https://leetcode.com/problems/validate-binary-search-tree/"></a>
* User: liyulin
* Date: 2019/8/15
*/
public class ValidateBinarySearchTree {
    /**
     * Validates a strict BST (no duplicates) via an iterative in-order
     * traversal: the tree is a valid BST iff its in-order sequence is
     * strictly increasing.
     *
     * <p>Replaces the original approach that recomputed the max of the left
     * subtree and min of the right subtree at every node — O(n^2) in the
     * worst case — with a single O(n) pass using O(h) stack space. Using a
     * boxed {@code Integer} for the previous value (instead of a sentinel
     * like Integer.MIN_VALUE) keeps trees containing MIN_VALUE correct.
     *
     * @param root root of the tree, may be null (an empty tree is valid)
     * @return true if the tree satisfies the BST property
     */
    public boolean isValidBST(TreeNode root) {
        Stack<TreeNode> stack = new Stack<>();
        TreeNode node = root;
        Integer prev = null; // last value produced by the in-order walk
        while (node != null || !stack.isEmpty()) {
            // Descend to the leftmost unvisited node.
            while (node != null) {
                stack.push(node);
                node = node.left;
            }
            node = stack.pop();
            // In-order values must be strictly increasing.
            if (prev != null && node.val <= prev) {
                return false;
            }
            prev = node.val;
            node = node.right;
        }
        return true;
    }

    /**
     * Reference variant: materialises the full in-order sequence, then
     * checks that it is strictly increasing. Same verdicts as
     * {@link #isValidBST(TreeNode)} but O(n) extra memory.
     */
    public boolean isValidBST1(TreeNode root) {
        Stack<TreeNode> stack = new Stack<>();
        List<Integer> inorderList = new ArrayList<>();
        while (root != null || stack.size() > 0) {
            if (root != null) {
                stack.push(root);
                root = root.left;
            } else {
                root = stack.pop();
                inorderList.add(root.val);
                root = root.right;
            }
        }
        for (int i = 0; i < inorderList.size() - 1; i++) {
            if (inorderList.get(i) >= inorderList.get(i + 1)) {
                return false;
            }
        }
        return true;
    }
}
|
mehrdad-shokri/retdec | include/retdec/pelib/DebugDirectory.h | <filename>include/retdec/pelib/DebugDirectory.h
/*
* DebugDirectory.h - Part of the PeLib library.
*
* Copyright (c) 2004 - 2005 <NAME> (<EMAIL>)
* All rights reserved.
*
* This software is licensed under the zlib/libpng License.
* For more details see http://www.opensource.org/licenses/zlib-license.php
* or the license information file (license.htm) in the root directory
* of PeLib.
*/
#ifndef RETDEC_PELIB_DEBUGDIRECTORY_H
#define RETDEC_PELIB_DEBUGDIRECTORY_H
#include "retdec/pelib/ImageLoader.h"
namespace PeLib
{
/// Class that handles the Debug directory.
class DebugDirectory
{
protected:
/// Stores the various DebugDirectory structures.
std::vector<PELIB_IMG_DEBUG_DIRECTORY> m_vDebugInfo;
/// Stores RVAs which are occupied by this debug directory.
std::vector<std::pair<unsigned int, unsigned int>> m_occupiedAddresses;
void read(ImageLoader & imageLoader, std::vector<PELIB_IMG_DEBUG_DIRECTORY> & debugInfo, std::uint32_t rva, std::uint32_t size);
public:
virtual ~DebugDirectory() = default;
/// Reads the Debug directory from a file.
int read(std::istream& inStream, ImageLoader & imageLoader);
///
void clear(); // EXPORT
/// Rebuilds the current Debug directory.
void rebuild(std::vector<std::uint8_t>& obBuffer) const; // EXPORT
/// Returns the size the current Debug directory needs after rebuilding.
unsigned int size() const;
/// Writes the current Debug directory back to a file.
int write(const std::string& strFilename, unsigned int uiOffset) const; // EXPORT
/// Returns the number of DebugDirectory image structures in the current DebugDirectory.
unsigned int calcNumberOfEntries() const; // EXPORT
/// Adds a new debug structure.
void addEntry(); // EXPORT
/// Removes a debug structure.
void removeEntry(std::size_t uiIndex); // EXPORT
/// Returns the Characteristics value of a debug structure.
std::uint32_t getCharacteristics(std::size_t uiIndex) const; // EXPORT
/// Returns the TimeDateStamp value of a debug structure.
std::uint32_t getTimeDateStamp(std::size_t uiIndex) const; // EXPORT
/// Returns the MajorVersion value of a debug structure.
std::uint16_t getMajorVersion(std::size_t uiIndex) const; // EXPORT
/// Returns the MinorVersion value of a debug structure.
std::uint16_t getMinorVersion(std::size_t uiIndex) const; // EXPORT
/// Returns the Type value of a debug structure.
std::uint32_t getType(std::size_t uiIndex) const; // EXPORT
/// Returns the SizeOfData value of a debug structure.
std::uint32_t getSizeOfData(std::size_t uiIndex) const; // EXPORT
/// Returns the AddressOfRawData value of a debug structure.
std::uint32_t getAddressOfRawData(std::size_t uiIndex) const; // EXPORT
/// Returns the PointerToRawData value of a debug structure.
std::uint32_t getPointerToRawData(std::size_t uiIndex) const; // EXPORT
std::vector<std::uint8_t> getData(std::size_t index) const; // EXPORT
/// Sets the Characteristics value of a debug structure.
void setCharacteristics(std::size_t uiIndex, std::uint32_t dwValue); // EXPORT
/// Sets the TimeDateStamp value of a debug structure.
void setTimeDateStamp(std::size_t uiIndex, std::uint32_t dwValue); // EXPORT
/// Sets the MajorVersion value of a debug structure.
void setMajorVersion(std::size_t uiIndex, std::uint16_t wValue); // EXPORT
/// Sets the MinorVersion value of a debug structure.
void setMinorVersion(std::size_t uiIndex, std::uint16_t wValue); // EXPORT
/// Sets the Type value of a debug structure.
void setType(std::size_t uiIndex, std::uint32_t dwValue); // EXPORT
/// Sets the SizeOfData value of a debug structure.
void setSizeOfData(std::size_t uiIndex, std::uint32_t dwValue); // EXPORT
/// Sets the AddressOfRawData value of a debug structure.
void setAddressOfRawData(std::size_t uiIndex, std::uint32_t dwValue); // EXPORT
/// Sets the PointerToRawData value of a debug structure.
void setPointerToRawData(std::size_t uiIndex, std::uint32_t dwValue); // EXPORT
void setData(std::size_t index, const std::vector<std::uint8_t>& data); // EXPORT
const std::vector<std::pair<unsigned int, unsigned int>>& getOccupiedAddresses() const;
};
}
#endif
|
kazunetakahashi/atcoder | 2019/0112_aising2019/D.cpp | <reponame>kazunetakahashi/atcoder
/**
* File : D.cpp
* Author : <NAME>
* Created : 1/12/2019, 9:15:17 PM
* Powered by Visual Studio Code
*/
#include <iostream>
#include <iomanip> // << fixed << setprecision(xxx)
#include <algorithm> // do { } while ( next_permutation(A, A+xxx) ) ;
#include <vector>
#include <string> // to_string(nnn) // substr(m, n) // stoi(nnn)
#include <complex>
#include <tuple>
#include <queue>
#include <stack>
#include <map> // if (M.find(key) != M.end()) { }
#include <set>
#include <functional>
#include <random> // auto rd = bind(uniform_int_distribution<int>(0, 9), mt19937(19920725));
#include <chrono> // std::chrono::system_clock::time_point start_time, end_time;
// start = std::chrono::system_clock::now();
// double elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end_time-start_time).count();
#include <cctype>
#include <cassert>
#include <cmath>
#include <cstdio>
#include <cstdlib>
using namespace std;
#define DEBUG 0 // change 0 -> 1 if we need debug.
typedef long long ll;
// const int dx[4] = {1, 0, -1, 0};
// const int dy[4] = {0, 1, 0, -1};
// const int C = 1e6+10;
const ll infty = 100000000007;
int N, Q;
ll A[100010];
vector<ll> B;
ll X[100010];
set<ll> S;
map<ll, ll> M;
ll Y[100010];
ll Z[100010];
// Print the cached answer M[X[i]] for every query, in the order the
// queries were read (one result per line).
void flush()
{
  for (auto i = 0; i < Q; i++)
  {
    cout << M[X[i]] << endl;
  }
}
int main()
{
cin >> N >> Q;
for (auto i = 0; i < N; i++)
{
cin >> A[i];
}
for (auto i = 0; i < Q; i++)
{
cin >> X[i];
S.insert(X[i]);
}
reverse(A, A + N);
Y[N + 1] = 0;
Y[N] = 0;
Y[N - 1] = A[N - 1];
Y[N - 2] = A[N - 2];
for (auto i = N - 3; i >= 0; i--)
{
Y[i] = A[i] + Y[i + 2];
}
Z[0] = A[0];
for (auto i = 1; i < N; i++)
{
Z[i] = A[i] + Z[i - 1];
}
B.push_back(infty);
for (auto i = 0; i < N; i++)
{
B.push_back(A[i]);
}
B.push_back(-infty);
auto it = S.begin();
for (auto i = N; i >= 0; i--)
{
int second = i / 2;
int first = i - second;
ll score;
if (first == 0)
{
score = Y[0];
}
else if (first == second)
{
score = Z[first - 1] + Y[i];
}
else
{
score = Z[first - 1] + Y[i + 1];
}
ll upper, lower;
if (second == 0)
{
upper = infty;
lower = -infty;
}
else
{
ll FA = B[first];
ll FB = B[first + second];
upper = (FA + FB) / 2;
ll GA = B[first + 1];
ll GB = B[first + second + 1];
lower = (GA + GB) / 2 + 1;
}
// cerr << "i = " << i << ", upper = " << upper << ", lower = " << lower << endl;
while (it != S.end())
{
if (lower <= *it && *it <= upper)
{
M[*it] = score;
it++;
}
else
{
break;
}
}
}
assert(it == S.end());
flush();
} |
marwajomaa/connect5 | server/database/queries/get_question_results.js | <reponame>marwajomaa/connect5
const Answer = require("../models/Answer");
// Fetch all answers recorded for a given question within a session.
//
// Returns the Mongoose query directly: it is already thenable, so wrapping it
// in `new Promise((resolve, reject) => ...)` (the explicit promise-construction
// anti-pattern) added nothing and obscured rejection stack traces. Callers
// continue to use `.then(...)` / `.catch(...)` exactly as before.
const getQuestionResults = (questionId, sessionId) =>
  Answer.find({ question: questionId, session: sessionId });
module.exports = getQuestionResults;
|
Klarrio/cosmos | cosmos-integration-tests/src/main/scala/com/mesosphere/cosmos/http/CosmosRequests.scala | <filename>cosmos-integration-tests/src/main/scala/com/mesosphere/cosmos/http/CosmosRequests.scala<gh_stars>10-100
package com.mesosphere.cosmos.http
import com.mesosphere.cosmos.rpc
import com.mesosphere.http.MediaType
import io.lemonlabs.uri.Uri
object CosmosRequests {
val capabilities: HttpRequest = {
HttpRequest.get(
path = RawRpcPath("/capabilities"),
accept = rpc.MediaTypes.CapabilitiesResponse
)
}
def packageDescribeV2(
describeRequest: rpc.v1.model.DescribeRequest
): HttpRequest = {
packageDescribe(describeRequest, accept = rpc.MediaTypes.V2DescribeResponse)
}
def packageDescribeV3(
describeRequest: rpc.v1.model.DescribeRequest
): HttpRequest = {
packageDescribe(describeRequest, accept = rpc.MediaTypes.V3DescribeResponse)
}
def packageInstallV1(
installRequest: rpc.v1.model.InstallRequest
): HttpRequest = {
packageInstall(installRequest, accept = rpc.MediaTypes.V1InstallResponse)
}
def packageInstallV2(
installRequest: rpc.v1.model.InstallRequest
): HttpRequest = {
packageInstall(installRequest, accept = rpc.MediaTypes.V2InstallResponse)
}
def packageList(listRequest: rpc.v1.model.ListRequest): HttpRequest = {
HttpRequest.post(
PackageRpcPath("list"),
listRequest,
rpc.MediaTypes.ListRequest,
rpc.MediaTypes.ListResponse
)
}
def packageListVersions(listVersionsRequest: rpc.v1.model.ListVersionsRequest): HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("list-versions"),
body = listVersionsRequest,
contentType = rpc.MediaTypes.ListVersionsRequest,
accept = rpc.MediaTypes.ListVersionsResponse
)
}
def packageRender(renderRequest: rpc.v1.model.RenderRequest): HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("render"),
body = renderRequest,
contentType = rpc.MediaTypes.RenderRequest,
accept = rpc.MediaTypes.RenderResponse
)
}
def packageSearch(searchRequest: rpc.v1.model.SearchRequest): HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("search"),
body = searchRequest,
contentType = rpc.MediaTypes.SearchRequest,
accept = rpc.MediaTypes.SearchResponse
)
}
def packageRepositoryAdd(
repositoryAddRequest: rpc.v1.model.PackageRepositoryAddRequest
): HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("repository/add"),
body = repositoryAddRequest,
contentType = rpc.MediaTypes.PackageRepositoryAddRequest,
accept = rpc.MediaTypes.PackageRepositoryAddResponse
)
}
def packageRepositoryDelete(
repositoryDeleteRequest: rpc.v1.model.PackageRepositoryDeleteRequest
): HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("repository/delete"),
body = repositoryDeleteRequest,
contentType = rpc.MediaTypes.PackageRepositoryDeleteRequest,
accept = rpc.MediaTypes.PackageRepositoryDeleteResponse
)
}
def packageRepositoryList: HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("repository/list"),
body = rpc.v1.model.PackageRepositoryListRequest(),
contentType = rpc.MediaTypes.PackageRepositoryListRequest,
accept = rpc.MediaTypes.PackageRepositoryListResponse
)
}
def packageUninstall(uninstallRequest: rpc.v1.model.UninstallRequest): HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("uninstall"),
body = uninstallRequest,
contentType = rpc.MediaTypes.UninstallRequest,
accept = rpc.MediaTypes.UninstallResponse
)
}
def serviceDescribe(
describeRequest: rpc.v1.model.ServiceDescribeRequest
)(
implicit testContext: TestContext
): HttpRequest = {
HttpRequest.post(
path = ServiceRpcPath("describe"),
body = describeRequest,
contentType = rpc.MediaTypes.ServiceDescribeRequest,
accept = rpc.MediaTypes.ServiceDescribeResponse
)
}
def serviceUpdate(
updateRequest: rpc.v1.model.ServiceUpdateRequest
)(
implicit testContext: TestContext
): HttpRequest = {
HttpRequest.post(
path = ServiceRpcPath("update"),
body = updateRequest,
contentType = rpc.MediaTypes.ServiceUpdateRequest,
accept = rpc.MediaTypes.ServiceUpdateResponse
)
}
def packageResource(resourceUri: Uri): HttpRequest = {
HttpRequest(
path = PackageRpcPath("resource"),
headers = Map.empty,
method = Get("url" -> resourceUri.toString)
)
}
private def packageDescribe(
describeRequest: rpc.v1.model.DescribeRequest,
accept: MediaType
): HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("describe"),
body = describeRequest,
contentType = rpc.MediaTypes.DescribeRequest,
accept = accept
)
}
private def packageInstall(
installRequest: rpc.v1.model.InstallRequest,
accept: MediaType
): HttpRequest = {
HttpRequest.post(
path = PackageRpcPath("install"),
body = installRequest,
contentType = rpc.MediaTypes.InstallRequest,
accept = accept
)
}
}
|
ljtfreitas/java-restify | java-restify-netflix-zookeeper-service-discovery/src/main/java/com/github/ljtfreitas/restify/http/netflix/client/request/ribbon/discovery/zookeeper/ZookeeperCuratorServiceDiscovery.java | <reponame>ljtfreitas/java-restify
/*******************************************************************************
*
* MIT License
*
* Copyright (c) 2016 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*******************************************************************************/
package com.github.ljtfreitas.restify.http.netflix.client.request.ribbon.discovery.zookeeper;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
import java.util.Optional;
import org.apache.curator.RetryPolicy;
import org.apache.curator.RetrySleeper;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.imps.CuratorFrameworkState;
import org.apache.curator.x.discovery.ServiceCacheBuilder;
import org.apache.curator.x.discovery.ServiceDiscovery;
import org.apache.curator.x.discovery.ServiceDiscoveryBuilder;
import org.apache.curator.x.discovery.ServiceInstance;
import org.apache.curator.x.discovery.ServiceProviderBuilder;
import org.apache.curator.x.discovery.details.InstanceSerializer;
import com.github.ljtfreitas.restify.util.Try;
public class ZookeeperCuratorServiceDiscovery<T> implements Closeable {
private final ServiceDiscovery<T> serviceDiscovery;
private final CuratorFramework curator;
public ZookeeperCuratorServiceDiscovery(Class<T> instanceType, ZookeeperConfiguration configuration,
InstanceSerializer<T> serializer) {
CuratorFramework curator = buildCuratorWith(configuration);
this.serviceDiscovery = buildServiceDiscoveryWith(instanceType, configuration, serializer, curator);
this.curator = curator;
}
public ZookeeperCuratorServiceDiscovery(Class<T> instanceType, ZookeeperConfiguration configuration,
InstanceSerializer<T> serializer, CuratorFramework curator) {
this.serviceDiscovery = buildServiceDiscoveryWith(instanceType, configuration, serializer, curator);
this.curator = curator;
}
public CuratorFramework buildCuratorWith(ZookeeperConfiguration configuration) {
return CuratorFrameworkFactory.builder()
.connectString(configuration.connectionString())
.retryPolicy(new RetryNever())
.build();
}
private ServiceDiscovery<T> buildServiceDiscoveryWith(Class<T> instanceType, ZookeeperConfiguration configuration,
InstanceSerializer<T> serializer, CuratorFramework curator) {
try {
if (!CuratorFrameworkState.STARTED.equals(curator.getState())) {
curator.start();
}
ServiceDiscovery<T> serviceDiscovery = ServiceDiscoveryBuilder.builder(instanceType)
.client(curator)
.basePath(configuration.chroot())
.serializer(serializer)
.build();
serviceDiscovery.start();
return serviceDiscovery;
} catch (Exception e) {
throw new ZookeeperServiceDiscoveryException("Error on create Zookeeper ServiceDiscovery", e);
}
}
public Collection<ServiceInstance<T>> queryForInstances(String serviceName) throws Exception {
return serviceDiscovery.queryForInstances(serviceName);
}
public Optional<ServiceInstance<T>> queryForInstance(String name, String id) throws Exception {
return Optional.ofNullable(serviceDiscovery.queryForInstance(name, id));
}
public Optional<ServiceInstance<T>> queryForInstance(ZookeeperServiceInstance instance) {
return doQueryForInstance(instance);
}
private Optional<ServiceInstance<T>> doQueryForInstance(ZookeeperServiceInstance instance) {
return Try.of(() -> serviceDiscovery.queryForInstances(instance.name()))
.map(instances -> instances.stream()
.filter(i -> i.getAddress().equals(instance.host()) && i.getPort().equals(instance.port()))
.findFirst())
.get();
}
public Collection<String> queryForNames() throws Exception {
return serviceDiscovery.queryForNames();
}
public void registerService(ServiceInstance<T> serviceInstance) throws Exception {
serviceDiscovery.registerService(serviceInstance);
}
public void unregisterService(ServiceInstance<T> serviceInstance) throws Exception {
serviceDiscovery.unregisterService(serviceInstance);
}
public void unregisterService(ZookeeperServiceInstance serviceInstance) throws Exception {
doQueryForInstance(serviceInstance)
.ifPresent(server -> Try.run(() -> serviceDiscovery.unregisterService(server)).apply());
}
public void updateService(ServiceInstance<T> serviceInstance) throws Exception {
serviceDiscovery.updateService(serviceInstance);
}
public ServiceProviderBuilder<T> serviceProviderBuilder() {
return serviceDiscovery.serviceProviderBuilder();
}
public ServiceCacheBuilder<T> serviceCacheBuilder() {
return serviceDiscovery.serviceCacheBuilder();
}
@Override
public final void close() throws IOException {
if (!CuratorFrameworkState.STOPPED.equals(curator.getState())) {
serviceDiscovery.close();
curator.close();
}
}
private class RetryNever implements RetryPolicy {
@Override
public boolean allowRetry(int retryCount, long elapsedTimeMs, RetrySleeper sleeper) {
return false;
}
}
}
|
timboudreau/pumpernickel | src/main/java/com/pump/job/SampleJobExecutor.java | <reponame>timboudreau/pumpernickel
/**
* This software is released as part of the Pumpernickel project.
*
* All com.pump resources in the Pumpernickel project are distributed under the
* MIT License:
* https://raw.githubusercontent.com/mickleness/pumpernickel/master/License.txt
*
* More information about the Pumpernickel project is available here:
* https://mickleness.github.io/pumpernickel/
*/
package com.pump.job;
import java.util.LinkedList;
import java.util.List;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
/**
* This executes a {@link com.pump.job.SampleJob} across a given number of
* threads until all samples are processed, an error occurs, or
* <code>abort()</code> is invoked.
*
* You need to extend this class and override <code>processResults()</code> and
* <code>processThrowable()</code> to make sense of the job results.
*/
public abstract class SampleJobExecutor<T> {
public enum State {
ACTIVE, ABORTED, ERROR, FINISHED
}
State state = State.ACTIVE;
final long sampleCount;
final SampleJob<T> job;
final int sampleIncrement;
long startingIndex;
int activeThreads;
List<ChangeListener> stateListeners = new LinkedList<ChangeListener>();
final Thread[] threads;
Runnable threadRunnable = new Runnable() {
public void run() {
try {
while (State.ACTIVE.equals(state)) {
long startingIndex;
int sampleCount;
synchronized (SampleJobExecutor.this) {
startingIndex = SampleJobExecutor.this.startingIndex;
long remainingSamples = SampleJobExecutor.this.sampleCount
- startingIndex;
sampleCount = sampleIncrement;
if (remainingSamples < sampleCount)
sampleCount = (int) remainingSamples;
if (sampleCount == 0)
return;
SampleJobExecutor.this.startingIndex += sampleCount;
}
try {
T[] results = job.calculate(startingIndex, sampleCount);
processResults(startingIndex, sampleCount, results);
} catch (Throwable t) {
setState(State.ERROR);
processThrowable(startingIndex, sampleCount, t);
}
}
} finally {
activeThreads--;
if (activeThreads == 0) {
// if we exited normally: set the state to a healthy
// FINISHED
synchronized (SampleJobExecutor.this) {
if (State.ACTIVE.equals(state)) {
setState(State.FINISHED);
}
}
}
}
}
};
/**
* Start executing a job.
*
* @param job
* the job to execute.
* @param threadCount
* the number of threads to use.
* @param sampleIncrement
* the number of samples requested at a time. This must be an
* <code>int</code> (instead of a <code>long</code>) because
* we'll create an array of this size.
*/
public SampleJobExecutor(SampleJob<T> job, int threadCount,
int sampleIncrement) {
this.job = job;
this.sampleCount = job.getSampleCount();
this.sampleIncrement = sampleIncrement;
this.startingIndex = 0;
this.activeThreads = threadCount;
ThreadGroup threadGroup = new ThreadGroup(job.toString());
threads = new Thread[threadCount];
for (int a = 0; a < threadCount; a++) {
threads[a] = new Thread(threadGroup, threadRunnable, "thread-" + a);
}
}
/**
* Start all the threads this executor manages.
*
* @param join
* if <code>false</code> then this method returns immediately. If
* <code>true</code> then this method blocks until the execution
* is complete.
*/
public void start(boolean join) {
for (int a = 0; a < threads.length; a++) {
threads[a].start();
}
if (join)
join();
}
/**
* Return true if this changed the state to ABORTED. This method may be
* ignored if the state is already FINISHED or ERROR.
*/
public boolean abort() {
synchronized (SampleJobExecutor.this) {
if (State.FINISHED.equals(state) || State.ERROR.equals(state))
return false;
return setState(State.ABORTED);
}
}
/**
* Set the state.
*
* @param state
* the new state.
* @return true if a change occurred.
*/
protected boolean setState(State state) {
if (state == null)
throw new NullPointerException();
synchronized (SampleJobExecutor.this) {
if (this.state.equals(state))
return false;
this.state = state;
}
fireStateListeners();
return true;
}
protected void fireStateListeners() {
for (ChangeListener l : stateListeners) {
try {
l.stateChanged(new ChangeEvent(this));
} catch (Exception e) {
e.printStackTrace();
}
}
}
/**
* Add a listener to be notified when the state changes.
*
*/
public void addStateListener(ChangeListener l) {
if (!stateListeners.contains(l))
stateListeners.add(l);
}
/**
* Remove a listener.
*
*/
public void removeStateListener(ChangeListener l) {
stateListeners.remove(l);
}
/**
* Blocks until the job is fully executed, aborted, or an error occurs.
*
* <P>
* (That is: when this method returns the <code>getState()</code> method is
* guaranteed to not return <code>State.ACTIVE</code>.)
*/
public void join() {
boolean repeat = true;
while (repeat) {
repeat = false;
for (int a = 0; a < threads.length; a++) {
try {
threads[a].join();
} catch (InterruptedException e) {
repeat = true;
}
}
}
}
/**
* Process a series of results.
*
*
* @param index
* the initial index. The zereoth index of the
* <code>results</code> array is this index in the larger data
* model.
* @param length
* the number of results. The <code>results</code> array is
* always this size. (Or if it is larger: then the array is being
* recycled and you can ignore results after that.)
* @param results
* the results.
*/
protected abstract void processResults(long index, int length, T[] results);
/**
* Process an error that occurred when we tried to get a series of results.
*
* The default implementation of this class just outputs a little
* information (including the stacktrace) to System.err.
*
* @param index
* the initial requested index.
* @param length
* the number of results requested.
* @param throwable
* the error intercepted.
*/
protected void processThrowable(long index, long length, Throwable throwable) {
System.err.println("An error occurred processing samples [" + index
+ ", " + (index + length) + ")");
throwable.printStackTrace();
}
} |
Dearker/middleground | hanyi-daily/src/test/java/com/hanyi/daily/load/SystemTest.java | package com.hanyi.daily.load;
import cn.hutool.core.util.RuntimeUtil;
import com.hanyi.daily.property.Book;
import org.junit.Test;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
/**
* 系统Api测试类,
* System.load()/loadLibrary和RunTime.load/loadLibrary都是用来装载库文件,不论是JNI库文件还是非JNI库文件,即jar包
*
* @author <EMAIL>
* @since 2021-06-11 17:09
*/
public class SystemTest {
/**
* 获取系统属性
*/
@Test
public void systemEnvTest() {
Map<String, String> getenv = System.getenv();
getenv.forEach((k, v) -> System.out.println(k + " || " + v));
}
/**
* 系统属性测试
* <p>
* os.arch 操作系统的架构
* os.version 操作系统的版本
* file.separator 文件分隔符(在 UNIX 系统中是“/”)
* path.separator 路径分隔符(在 UNIX 系统中是“:”)
* line.separator 行分隔符(在 UNIX 系统中是“/n”)
* user.name 用户的账户名称
* user.home 用户的主目录
* user.dir 用户的当前工作目录
*/
@Test
public void systemPropertiesTest() {
Properties properties = System.getProperties();
properties.forEach((k, v) -> System.out.println(k + " || " + v));
System.out.println("-------------------");
System.out.println(System.getProperty("java.version"));
}
/**
* 如果两个对象的identityHashCode值相等,则两个对象绝对是同一个对象,即调用Object的hashcode()方法;
* 返回与默认方法hashCode()返回的给定对象相同的哈希码,无论给定对象的类是否覆盖了hashCode()。 空引用的哈希码为零。
* 如果在web请求中传入的为String类型,则每次都会创建一个新的String,即hashCode()值每次都不一样.
* 如果想每次相同的值计算出来的结果相同,使用HashUtil.jsHash()方法,如果传入的参数为对象的toString()则需要先重写其toString()
* 不同的两个对象的hashCode()计算出来的值可能相同;如果在程序内部自己使用可适当的使用System.identityHashCode()
*/
@Test
public void systemIdentityHashCodeTest() {
String a = "重地";
String b = "通话";
//相同
System.out.println(a.hashCode());
System.out.println(b.hashCode());
//不同
System.out.println(System.identityHashCode(a));
System.out.println(System.identityHashCode("重地"));
System.out.println(System.identityHashCode(b));
System.out.println("-------------------");
Book book = new Book("重地", 1);
Book book1 = new Book("通话", 1);
//相同
System.out.println(book.hashCode() + " || " + book1.hashCode());
//不同
System.out.println(System.identityHashCode(book) + " || " + System.identityHashCode(book1));
}
/**
* 系统退出测试
* 0 正常退出,程序正常执行结束退出,Java GC进行垃圾回收,直接退出
* 1 是非正常退出,就是说无论程序正在执行与否,都退出
*/
@Test
public void systemExitTest() {
System.out.println("111111");
//正常退出程序
System.exit(0);
//下面代码不会执行
System.out.println("111111222");
}
/**
* 系统行分隔符测试
* 在UNIX系统上,返回"\n" ; 在Microsoft Windows系统上,返回"\r\n"
*/
@Test
public void systemLineSeparatorTest() {
System.out.println(System.lineSeparator());
}
/**
* 运行时内存测试
*/
@Test
public void runTimeMemoryTest() {
Runtime runtime = Runtime.getRuntime();
System.out.println("Java虚拟机中的可用内存量:" + runtime.freeMemory());
System.out.println("Java虚拟机中的内存总量:" + runtime.totalMemory());
System.out.println("Java虚拟机将尝试使用的最大内存量:" + runtime.maxMemory());
}
/**
* 运行时执行命令测试
*
* @throws IOException 异常
*/
@Test
public void runTimeExecTest() throws IOException {
Runtime runtime = Runtime.getRuntime();
Process process = runtime.exec("ls -l");
System.out.println(process.getInputStream());
Process exec = RuntimeUtil.exec("ls -l");
System.out.println(exec);
String str = RuntimeUtil.execForStr("ipconfig");
System.out.println(str);
}
}
|
pablorochat/flappykhaled | node_modules/codecov.io/bin/codecov.io.js | #!/usr/bin/env node
var sendToCodeCov = require('../lib/sendToCodeCov.io');

// CLI entry point: read a complete coverage report from stdin, then upload it.
process.stdin.resume();
process.stdin.setEncoding('utf8');

var input = '';

// Accumulate stdin chunks until EOF.
process.stdin.on('data', function(chunk) {
  input += chunk;
});

process.stdin.on('end', function() {
  sendToCodeCov(input, function(err) {
    if (err) {
      console.log("error sending to codecov.io: ", err, err.stack);
      // "non-success response" errors carry extra server output in err.detail.
      if (/non-success response/.test(err.message)){
        console.log("detail: ", err.detail);
      }
      // Re-throw so the process exits non-zero and CI fails visibly.
      throw err;
    }
  });
});
|
acangiano/ruby-benchmark-suite | benchmarks/rdoc/bm_rdoc_against_itself_ri.rb | <filename>benchmarks/rdoc/bm_rdoc_against_itself_ri.rb<gh_stars>10-100
require './rdoc_bm_helper'

# Benchmark: time RDoc processing its own sources with ri output enabled.
Bench.run [1] do |n|
  # run rdoc against itself with ri
  go 'rdp-rdoc-2.4.6', ['--ri']
end
|
MickeyPa/soen344 | application/models/Appointment.py | <reponame>MickeyPa/soen344
from index import db
import datetime
# PickleType converts a Python object to a string so that it can be stored in the database
class Appointment(db.Model):
    """SQLAlchemy model for a booked appointment.

    Links a patient, a doctor, a clinic and a room together for a given
    date/time slot.
    """
    # Surrogate primary key, assigned by the database.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Room number where the appointment takes place (FK to room.roomNumber).
    room = db.Column(db.Integer, db.ForeignKey('room.roomNumber'), nullable=False)
    # Clinic hosting the appointment (FK to clinic.id).
    clinic_id = db.Column(db.Integer, db.ForeignKey('clinic.id'), nullable=False)
    # Attending doctor, identified by permit number (FK to doctor.permit_number).
    doctor_permit_number = db.Column(db.String(7), db.ForeignKey('doctor.permit_number'), nullable=False)
    # Patient, identified by health-card number (FK to patient.hcnumber).
    patient_hcnumber = db.Column(db.String(12), db.ForeignKey('patient.hcnumber'), nullable=False)
    # Appointment length; unit is not stated here — presumably minutes, TODO confirm.
    length = db.Column(db.Integer, nullable=False)
    # Start time stored as a free-form string.
    time = db.Column(db.String(), nullable=False)
    # Calendar date of the appointment.
    date = db.Column(db.Date(), nullable=False)

    def __iter__(self):
        # Yield (key, value) pairs so dict(appointment) produces a plain,
        # JSON-serialisable mapping; the date is rendered as ISO "YYYY-MM-DD".
        yield 'id', self.id
        yield 'room', self.room
        yield 'clinic_id', self.clinic_id
        yield 'doctor_permit_number', self.doctor_permit_number
        yield 'patient_hcnumber', self.patient_hcnumber
        yield 'length', self.length
        yield 'time', self.time
        yield 'date', self.date.strftime("%Y-%m-%d")
# Initializes the database
db.create_all() |
tgodzik/intellij-community | plugins/hg4idea/src/org/zmlx/hg4idea/provider/commit/HgMQNewExecutor.java | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.zmlx.hg4idea.provider.commit;
import com.intellij.openapi.vcs.changes.CommitContext;
import com.intellij.openapi.vcs.changes.CommitExecutor;
import com.intellij.openapi.vcs.changes.CommitSession;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
/**
 * Commit executor that creates a new Mercurial Queues (MQ) patch from the
 * selected changes instead of performing a regular commit.
 */
public class HgMQNewExecutor implements CommitExecutor {
  //todo:should be moved to create patch dialog as an EP -> create patch with... MQ
  @NotNull private final HgCheckinEnvironment myCheckinEnvironment;

  public HgMQNewExecutor(@NotNull HgCheckinEnvironment checkinEnvironment) {
    myCheckinEnvironment = checkinEnvironment;
  }

  @NotNull
  @Nls
  @Override
  public String getActionText() {
    // Label shown in the commit dialog's action chooser ("&" marks the mnemonic).
    return "Create M&Q Patch";
  }

  @NotNull
  @Override
  public CommitSession createCommitSession(@NotNull CommitContext commitContext) {
    // Flip the checkin environment into "mq new" mode, then reuse the
    // standard VCS commit session flow.
    myCheckinEnvironment.setMqNew();
    return CommitSession.VCS_COMMIT;
  }
}
|
solotzg/tiflash | dbms/src/Parsers/ParserTablesInSelectQuery.h | // Copyright 2022 PingCAP, Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <Parsers/IParserBase.h>
namespace DB
{
/** List of single or multiple JOIN-ed tables or subqueries in SELECT query, with ARRAY JOINs and SAMPLE, FINAL modifiers.
*/
class ParserTablesInSelectQuery : public IParserBase
{
protected:
const char * getName() const { return "table, table function, subquery or list of joined tables"; }
bool parseImpl(Pos & pos, ASTPtr & node, Expected & expected);
};
class ParserTablesInSelectQueryElement : public IParserBase
{
public:
ParserTablesInSelectQueryElement(bool is_first) : is_first(is_first) {}
protected:
const char * getName() const { return "table, table function, subquery or list of joined tables"; }
bool parseImpl(Pos & pos, ASTPtr & node, Expected & expected);
private:
bool is_first;
};
class ParserTableExpression : public IParserBase
{
protected:
const char * getName() const { return "table or subquery or table function"; }
bool parseImpl(Pos & pos, ASTPtr & node, Expected & expected);
};
class ParserArrayJoin : public IParserBase
{
protected:
const char * getName() const { return "array join"; }
bool parseImpl(Pos & pos, ASTPtr & node, Expected & expected);
};
}
|
ucsd-progsys/nate | eval/sherrloc/src/sherrloc/constraint/analysis/CFLPathFinder.java | <filename>eval/sherrloc/src/sherrloc/constraint/analysis/CFLPathFinder.java<gh_stars>1-10
package sherrloc.constraint.analysis;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import sherrloc.graph.ConstraintEdge;
import sherrloc.graph.ConstraintGraph;
import sherrloc.graph.ConstructorEdge;
import sherrloc.graph.Edge;
import sherrloc.graph.EdgeCondition;
import sherrloc.graph.EmptyEdge;
import sherrloc.graph.JoinEdge;
import sherrloc.graph.LeftEdge;
import sherrloc.graph.LeqEdge;
import sherrloc.graph.MeetEdge;
import sherrloc.graph.Node;
import sherrloc.graph.ReductionEdge;
import sherrloc.graph.RightEdge;
/**
* Saturate a constraint graph according to a context-free-grammar with three
* types of edges:
* <ul>
* <li>LEQ: an inequality on node
* <li>LEFT: constructor edge
* <li>RIGHT: destructor edge
* </ul>
* <p>
* See the full grammar in the paper "Toward General Diagnosis of Static Errors"
* by <NAME> and <NAME>
*/
abstract public class CFLPathFinder implements PathFinder {

    /** Saturated LEQ edges, indexed by [start node index][end node index]. */
    protected LeqEdge[][] leqPath;

    /** Saturated LEFT edges, keyed per node pair by their constructor condition. */
    protected Map<EdgeCondition, LeftEdge>[][] leftPath;

    // since the RIGHT edges are rare in a graph, and no right edges are
    // inferred, using HashMap can be more memory efficient than arrays
    protected Map<Integer, Map<Integer, List<RightEdge>>> rightPath;

    /** True once initialize() and saturation() have run (lazily, on first query). */
    protected boolean initialized = false;

    /** The constraint graph being saturated. */
    protected final ConstraintGraph g;

    /**
     * @param graph
     *            A graph to be saturated
     */
    @SuppressWarnings("unchecked") // generic array creation for leftPath
    public CFLPathFinder(ConstraintGraph graph) {
        g = graph;
        int size = g.getAllNodes().size();
        leqPath = new LeqEdge[size][size];
        leftPath = new HashMap[size][size];
        rightPath = new HashMap<Integer, Map<Integer, List<RightEdge>>>();
        for (Node start : g.getAllNodes()) {
            for (Node end : g.getAllNodes()) {
                int sIndex = start.getIndex();
                int eIndex = end.getIndex();
                leqPath[sIndex][eIndex] = null;
                leftPath[sIndex][eIndex] = new HashMap<EdgeCondition, LeftEdge>();
            }
        }
    }

    /**
     * Add a {@link ReductionEdge} to the graph
     *
     * @param edge
     *            An edge to be added
     */
    abstract protected void addEdge(ReductionEdge edge);

    /**
     * Return all {@link RightEdge}s from <code>fIndex</code> to
     * <code>tIndex</code>
     *
     * @param fIndex
     *            Start node
     * @param tIndex
     *            End node
     * @return All {@link RightEdge}s from <code>fIndex</code> to
     *         <code>tIndex</code> (a fresh empty list when there are none)
     */
    protected List<RightEdge> getRightEdges(int fIndex, int tIndex) {
        if (hasRightEdges(fIndex, tIndex)) {
            return rightPath.get(fIndex).get(tIndex);
        }
        return new ArrayList<RightEdge>();
    }

    /**
     * Return true if there is at least one {@link RightEdge} from
     * <code>fIndex</code> to <code>tIndex</code>
     *
     * @param fIndex
     *            Start node
     * @param tIndex
     *            End node
     * @return True if there is at least one {@link RightEdge} from
     *         <code>fIndex</code> to <code>tIndex</code>
     */
    protected boolean hasRightEdges(int fIndex, int tIndex) {
        // Return the condition directly instead of an if/else on true/false.
        return rightPath.containsKey(fIndex) && rightPath.get(fIndex).containsKey(tIndex);
    }

    /**
     * Convert all graph edges into {@link ReductionEdge}s
     */
    private void initialize() {
        List<Edge> edges = g.getAllEdges();
        for (Edge edge : edges) {
            if (edge instanceof ConstraintEdge || edge instanceof MeetEdge
                    || edge instanceof JoinEdge) {
                addEdge(new LeqEdge(edge, EmptyEdge.getInstance()));
            } else if (edge instanceof ConstructorEdge) {
                ConstructorEdge e = (ConstructorEdge) edge;
                if (e.getCondition().isReverse()) {
                    addEdge(new RightEdge(e.getCondition(), edge, EmptyEdge.getInstance()));
                } else {
                    addEdge(new LeftEdge(e.getCondition(), edge, EmptyEdge.getInstance()));
                }
            }
        }
    }

    /**
     * Return a path in the constraint graph so that a partial ordering on
     * <code>start, end</code> can be derived from constraints along the path.
     * Return null when no such path exits. The graph is initialized and
     * saturated lazily on the first call.
     *
     * @param verbose
     *            Set true to output saturation time
     */
    public List<Edge> getPath(Node start, Node end, boolean verbose) {
        if (!initialized) {
            long startTime = System.currentTimeMillis();
            initialize();
            saturation();
            initialized = true;
            long endTime = System.currentTimeMillis();
            if (verbose)
                System.out.println("path_finding time: " + (endTime - startTime));
        }

        LeqEdge path = getLeqPath(start, end);
        if (path != null)
            return path.getEdges();
        else
            return null;
    }

    /**
     * Return an LEQ path from <code>start</code> to <code>end</code>
     *
     * @param start
     *            Start node
     * @param end
     *            End node
     * @return An LEQ path, or null when none was derived
     */
    protected LeqEdge getLeqPath(Node start, Node end) {
        int sIndex = start.getIndex();
        int eIndex = end.getIndex();
        return leqPath[sIndex][eIndex];
    }

    /**
     * Saturate the constraint graph
     */
    abstract protected void saturation();
}
|
swanhong/CiFEr | include/cifer/sample/normal_cumulative.h | <gh_stars>10-100
/*
* Copyright (c) 2018 XLAB d.o.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CIFER_NORMAL_CUMULATIVE_H
#define CIFER_NORMAL_CUMULATIVE_H

#include "cifer/sample/normal.h"

/**
 * \file
 * \ingroup sample
 * \brief Normal cumulative sampler
 */

/**
 * cfe_normal_cumulative samples random values from the cumulative normal (Gaussian)
 * probability distribution, centered on 0.
 * This sampler is the fastest, but is limited only to cases when sigma is not
 * too big, due to the sizes of the precomputed tables. Note that
 * the sampler offers arbitrary precision but the implementation is not
 * constant time.
 */
typedef struct cfe_normal_cumulative {
    cfe_normal nor;      // shared normal-distribution state (sigma, precision)
    cfe_vec precomputed; // table of precomputed values relative to the cumulative distribution
    bool two_sided;      // two_sided defines if we limit sampling only to non-negative integers or to all
    mpz_t sample_size;   // integer defining from how big of an interval do we need to sample uniformly to sample according to discrete Gauss
} cfe_normal_cumulative;

/**
 * Initializes an instance of cfe_normal_cumulative sampler. It assumes mean = 0.
 * Values are precomputed when this function is called, so that
 * normal_cumulative_sample merely returns a precomputed value.
 *
 * @param s A pointer to an uninitialized struct representing the sampler
 * @param sigma Standard deviation
 * @param n Precision parameter
 * @param two_sided Boolean defining if we limit only to non-negative integers
 */
void cfe_normal_cumulative_init(cfe_normal_cumulative *s, mpf_t sigma, size_t n, bool two_sided);

/**
 * Frees the memory occupied by the struct members. It does not free
 * memory occupied by the struct itself.
 *
 * @param s A pointer to an instance of the sampler (*initialized*
 * cfe_normal_cumulative struct)
 */
void cfe_normal_cumulative_free(cfe_normal_cumulative *s);

/**
 * Samples discrete cumulative distribution with precomputed values.
 *
 * @param res The random number (result value will be stored here)
 * @param s A pointer to an instance of the sampler (*initialized*
 * cfe_normal_cumulative struct)
 */
void cfe_normal_cumulative_sample(mpz_t res, cfe_normal_cumulative *s);

/**
 * Precomputes the values for sampling. This can be used only if sigma is not
 * too big.
 *
 * @param s A pointer to an instance of the sampler (*initialized*
 * cfe_normal_cumulative struct)
 */
void cfe_normal_cumulative_precompute(cfe_normal_cumulative *s);

/**
 * Given a random value from an interval and a cumulative distribution of mpz_t
 * values, it returns a sample distributed accordingly using binary search
 * over v[start..end].
 *
 * @param sample Target integer
 * @param start Start index
 * @param end End index
 * @param v Vector of values
 * @param middle_value Temporary placeholder for value comparison (to avoid
 * unnecessary memory allocations)
 * @return index of target integer
 */
size_t cfe_locate_int(mpz_t sample, size_t start, size_t end, cfe_vec *v, mpz_t middle_value);

/**
 * Sets the elements of a vector to random numbers with the normal_cumulative sampler.
 */
void cfe_normal_cumulative_sample_vec(cfe_vec *res, cfe_normal_cumulative *s);

/**
 * Sets the elements of a matrix to random numbers with the normal_cumulative sampler.
 */
void cfe_normal_cumulative_sample_mat(cfe_mat *res, cfe_normal_cumulative *s);

#endif
|
BryanNoller/OrionUO | OrionUO/GameObjects/GamePlayer.cpp | <filename>OrionUO/GameObjects/GamePlayer.cpp
// This is an open source non-commercial project. Dear PVS-Studio, please check it.
// PVS-Studio Static Code Analyzer for C, C++ and C#: http://www.viva64.com
/***********************************************************************************
**
** GamePlayer.cpp
**
** Copyright (C) August 2016 Hotride
**
************************************************************************************
*/
//----------------------------------------------------------------------------------
#include "stdafx.h"
//----------------------------------------------------------------------------------
CPlayer *g_Player = NULL;
//----------------------------------------------------------------------------------
// Creates the player character and immediately requests its full skills
// list from the server.
CPlayer::CPlayer(int serial)
    : CGameCharacter(serial)
{
    WISPFUN_DEBUG("c21_f1");
    CPacketSkillsRequest(Serial).Send();
}
//---------------------------------------------------------------------------
// Trivial destructor (debug trace only).
CPlayer::~CPlayer()
{
    WISPFUN_DEBUG("c21_f2");
}
//---------------------------------------------------------------------------
// Closes the bank box if it is currently open: empties its contents,
// marks it closed, and removes its container gump.
void CPlayer::CloseBank()
{
    CGameItem *bank = FindLayer(OL_BANK);

    if (bank == NULL || !bank->Opened)
        return;

    bank->Clear();
    bank->Opened = false;
    g_GumpManager.CloseGump(bank->Serial, 0, GT_CONTAINER);
}
//---------------------------------------------------------------------------
/*!
Search the player's backpack (including sub-containers) for a bandage.
@return Pointer to the bandage item, or NULL when none was found
*/
CGameItem *CPlayer::FindBandage()
{
    WISPFUN_DEBUG("c21_f11");
    CGameItem *backpack = FindLayer(OL_BACKPACK);

    // 0x0E21 is the bandage graphic.
    return (backpack != NULL) ? backpack->FindItem(0x0E21) : NULL;
}
//---------------------------------------------------------------------------
void CPlayer::UpdateAbilities()
{
WISPFUN_DEBUG("c21_f12");
ushort equippedGraphic = 0;
CGameItem *layerObject = g_Player->FindLayer(OL_1_HAND);
if (layerObject != NULL)
{
equippedGraphic = layerObject->Graphic;
}
else
{
layerObject = g_Player->FindLayer(OL_2_HAND);
if (layerObject != NULL)
equippedGraphic = layerObject->Graphic;
}
g_Ability[0] = 0xFF;
g_Ability[1] = 0xFF;
if (equippedGraphic)
{
ushort graphics[2] = { equippedGraphic, 0 };
ushort imageID = layerObject->GetStaticData()->AnimID;
int count = 1;
ushort testGraphic = equippedGraphic - 1;
if (g_Orion.m_StaticData[testGraphic].AnimID == imageID)
{
graphics[1] = testGraphic;
count = 2;
}
else
{
testGraphic = equippedGraphic + 1;
if (g_Orion.m_StaticData[testGraphic].AnimID == imageID)
{
graphics[1] = testGraphic;
count = 2;
}
}
IFOR (i, 0, count)
{
switch (graphics[i])
{
case 0x0901: // Gargish Cyclone
g_Ability[0] = AT_MOVING_SHOT;
g_Ability[1] = AT_INFUSED_THROW;
break;
case 0x0902: // Gargish Dagger
g_Ability[0] = AT_INFECTING;
g_Ability[1] = AT_SHADOW_STRIKE;
break;
case 0x0905: // Glass Staff
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x0906: // serpentstone staff
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x090C: // Glass Sword
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x0DF0:
case 0x0DF1: // Black Staves
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x0DF2:
case 0x0DF3:
case 0x0DF4:
case 0x0DF5: // Wands Type A-D
g_Ability[0] = AT_DISMOUNT;
g_Ability[1] = AT_DISARM;
break;
case 0x0E81:
case 0x0E82: // Shepherd's Crooks
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_DISARM;
break;
case 0x0E85:
case 0x0E86: // Pickaxes
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_DISARM;
break;
case 0x0E87:
case 0x0E88: // Pitchforks
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x0E89:
case 0x0E8A: // Quarter Staves
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x0EC2:
case 0x0EC3: // Cleavers
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_INFECTING;
break;
case 0x0EC4:
case 0x0EC5: // Skinning Knives
g_Ability[0] = AT_SHADOW_STRIKE;
g_Ability[1] = AT_DISARM;
break;
case 0x0F43:
case 0x0F44: // Hatchets
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_DISARM;
break;
case 0x0F45:
case 0x0F46: // Double Axes
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x0F47:
case 0x0F48: // Battle Axes
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x0F49:
case 0x0F4A: // Axes
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x0F4B:
case 0x0F4C:
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_WHIRLWIND_ATTACK;
break;
case 0x0F4D:
case 0x0F4E: // Bardiches
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x0F4F:
case 0x0F50: // Crossbows
g_Ability[0] = AT_CONCUSSION_BLOW;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x0F51:
case 0x0F52: // Daggers
g_Ability[0] = AT_INFECTING;
g_Ability[1] = AT_SHADOW_STRIKE;
break;
case 0x0F5C:
case 0x0F5D: // Maces
g_Ability[0] = AT_CONCUSSION_BLOW;
g_Ability[1] = AT_DISARM;
break;
case 0x0F5E:
case 0x0F5F: // Broadswords
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_ARMOR_IGNORE;
break;
case 0x0F60:
case 0x0F61: // Longswords
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x0F62:
case 0x0F63: // Spears
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x0FB5:
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_SHADOW_STRIKE;
break;
case 0x13AF:
case 0x13B0: // War Axes
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_BLEED_ATTACK;
break;
case 0x13B1:
case 0x13B2: // Bows
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x13B3:
case 0x13B4: // Clubs
g_Ability[0] = AT_SHADOW_STRIKE;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x13B7:
case 0x13B8: // Scimitars
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x13B9:
case 0x13BA: // Viking Swords
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x13FD: // Heavy Crossbows
g_Ability[0] = AT_MOVING_SHOT;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x13E3: // Smith's Hammers
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_SHADOW_STRIKE;
break;
case 0x13F6: // Butcher Knives
g_Ability[0] = AT_INFECTING;
g_Ability[1] = AT_DISARM;
break;
case 0x13F8: // Gnarled Staves
g_Ability[0] = AT_CONCUSSION_BLOW;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x13FB: // Large Battle Axes
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_BLEED_ATTACK;
break;
case 0x13FF: // Katana
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_ARMOR_IGNORE;
break;
case 0x1401: // Kryss
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_INFECTING;
break;
case 0x1402:
case 0x1403: // Short Spears
g_Ability[0] = AT_SHADOW_STRIKE;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x1404:
case 0x1405: // War Forks
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_DISARM;
break;
case 0x1406:
case 0x1407: // War Maces
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_BLEED_ATTACK;
break;
case 0x1438:
case 0x1439: // War Hammers
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x143A:
case 0x143B: // Mauls
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x143C:
case 0x143D: // Hammer Picks
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x143E:
case 0x143F: // Halberds
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x1440:
case 0x1441: // Cutlasses
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_SHADOW_STRIKE;
break;
case 0x1442:
case 0x1443: // Two Handed Axes
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_SHADOW_STRIKE;
break;
case 0x26BA: // Scythes
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x26BB: // Bone Harvesters
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x26BC: // Scepters
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x26BD: // Bladed Staves
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x26BE: // Pikes
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_INFECTING;
break;
case 0x26BF: // Double Bladed Staff
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_INFECTING;
break;
case 0x26C0: // Lances
g_Ability[0] = AT_DISMOUNT;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x26C1: // Crescent Blades
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x26C2: // Composite Bows
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_MOVING_SHOT;
break;
case 0x26C3: // Repeating Crossbows
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_MOVING_SHOT;
break;
case 0x26C4: // also Scythes
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x26C5: // also Bone Harvesters
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x26C6: // also Scepters
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x26C7: // also Bladed Staves
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x26C8: // also Pikes
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_INFECTING;
break;
case 0x26C9: // also Double Bladed Staff
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_INFECTING;
break;
case 0x26CA: // also Lances
g_Ability[0] = AT_DISMOUNT;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x26CB: // also Crescent Blades
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x26CC: // also Composite Bows
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_MOVING_SHOT;
break;
case 0x26CD: // also Repeating Crossbows
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_MOVING_SHOT;
break;
case 0x27A2: // No-Dachi
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_RIDING_SWIPE;
break;
case 0x27A3: // Tessen
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_BLOCK;
break;
case 0x27A4: // Wakizashi
g_Ability[0] = AT_FRENZIED_WHIRLWIND;
g_Ability[1] = AT_DOUBLE_STRIKE;
break;
case 0x27A5: // Yumi
g_Ability[0] = AT_ARMOR_PIERCE;
g_Ability[1] = AT_DOUBLE_SHOT;
break;
case 0x27A6: // Tetsubo
g_Ability[0] = AT_FRENZIED_WHIRLWIND;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x27A7: // Lajatang
g_Ability[0] = AT_DEFENSE_MASTERY;
g_Ability[1] = AT_FRENZIED_WHIRLWIND;
break;
case 0x27A8: // Bokuto
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_NERVE_STRIKE;
break;
case 0x27A9: // Daisho
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_DOUBLE_STRIKE;
break;
case 0x27AA: // Fukya
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x27AB: // Tekagi
g_Ability[0] = AT_DUAL_WIELD;
g_Ability[1] = AT_TALON_STRIKE;
break;
case 0x27AD: // Kama
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_DEFENSE_MASTERY;
break;
case 0x27AE: // Nunchaku
g_Ability[0] = AT_BLOCK;
g_Ability[1] = AT_FEINT;
break;
case 0x27AF: // Sai
g_Ability[0] = AT_BLOCK;
g_Ability[1] = AT_ARMOR_PIERCE;
break;
case 0x27ED: // also No-Dachi
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_RIDING_SWIPE;
break;
case 0x27EE: // also Tessen
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_BLOCK;
break;
case 0x27EF: // also Wakizashi
g_Ability[0] = AT_FRENZIED_WHIRLWIND;
g_Ability[1] = AT_DOUBLE_STRIKE;
break;
case 0x27F0: // also Yumi
g_Ability[0] = AT_ARMOR_PIERCE;
g_Ability[1] = AT_DOUBLE_SHOT;
break;
case 0x27F1: // also Tetsubo
g_Ability[0] = AT_FRENZIED_WHIRLWIND;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x27F2: // also Lajatang
g_Ability[0] = AT_DEFENSE_MASTERY;
g_Ability[1] = AT_FRENZIED_WHIRLWIND;
break;
case 0x27F3: // also Bokuto
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_NERVE_STRIKE;
break;
case 0x27F4: // also Daisho
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_DOUBLE_STRIKE;
break;
case 0x27F5: // also Fukya
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x27F6: // also Tekagi
g_Ability[0] = AT_DUAL_WIELD;
g_Ability[1] = AT_TALON_STRIKE;
break;
case 0x27F8: // Kama
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_DEFENSE_MASTERY;
break;
case 0x27F9: // Nunchaku
g_Ability[0] = AT_BLOCK;
g_Ability[1] = AT_FEINT;
break;
case 0x27FA: // Sai
g_Ability[0] = AT_BLOCK;
g_Ability[1] = AT_ARMOR_PIERCE;
break;
case 0x2D1E: // Elven Composite Longbows
g_Ability[0] = AT_FORCE_ARROW;
g_Ability[1] = AT_SERPENT_ARROW;
break;
case 0x2D1F: // Magical Shortbows
g_Ability[0] = AT_LIGHTNING_ARROW;
g_Ability[1] = AT_PSYCHIC_ATTACK;
break;
case 0x2D20: // Elven Spellblades
g_Ability[0] = AT_PSYCHIC_ATTACK;
g_Ability[1] = AT_BLEED_ATTACK;
break;
case 0x2D21: // Assassin Spikes
g_Ability[0] = AT_INFECTING;
g_Ability[1] = AT_SHADOW_STRIKE;
break;
case 0x2D22: // Leafblades
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_ARMOR_IGNORE;
break;
case 0x2D23: // War Cleavers
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_BLADEWEAVE;
break;
case 0x2D24: // Diamond Maces
g_Ability[0] = AT_CONCUSSION_BLOW;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x2D25: // Wild Staves
g_Ability[0] = AT_BLOCK;
g_Ability[1] = AT_FORCE_OF_NATURE;
break;
case 0x2D26: // Rune Blades
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_BLADEWEAVE;
break;
case 0x2D27: // Radiant Scimitars
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_BLADEWEAVE;
break;
case 0x2D28: // Ornate Axes
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x2D29: // Elven Machetes
g_Ability[0] = AT_DEFENSE_MASTERY;
g_Ability[1] = AT_BLADEWEAVE;
break;
case 0x2D2A: // also Elven Composite Longbows
g_Ability[0] = AT_FORCE_ARROW;
g_Ability[1] = AT_SERPENT_ARROW;
break;
case 0x2D2B: // also Magical Shortbows
g_Ability[0] = AT_LIGHTNING_ARROW;
g_Ability[1] = AT_PSYCHIC_ATTACK;
break;
case 0x2D2C: // also Elven Spellblades
g_Ability[0] = AT_PSYCHIC_ATTACK;
g_Ability[1] = AT_BLEED_ATTACK;
break;
case 0x2D2D: // also Assassin Spikes
g_Ability[0] = AT_INFECTING;
g_Ability[1] = AT_SHADOW_STRIKE;
break;
case 0x2D2E: // also Leafblades
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_ARMOR_IGNORE;
break;
case 0x2D2F: // also War Cleavers
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_BLADEWEAVE;
break;
case 0x2D30: // also Diamond Maces
g_Ability[0] = AT_CONCUSSION_BLOW;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x2D31: // also Wild Staves
g_Ability[0] = AT_BLOCK;
g_Ability[1] = AT_FORCE_OF_NATURE;
break;
case 0x2D32: // also Rune Blades
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_BLADEWEAVE;
break;
case 0x2D33: // also Radiant Scimitars
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_BLADEWEAVE;
break;
case 0x2D34: // also Ornate Axes
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x2D35: // also Elven Machetes
g_Ability[0] = AT_DEFENSE_MASTERY;
g_Ability[1] = AT_BLADEWEAVE;
break;
case 0x4067: // Boomerang
g_Ability[0] = AT_MYSTIC_ARC;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x4068: // Dual Short Axes
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_INFECTING;
break;
case 0x406B: // Soul Glaive
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x406C: // Cyclone
g_Ability[0] = AT_MOVING_SHOT;
g_Ability[1] = AT_INFUSED_THROW;
break;
case 0x406D: // Dual Pointed Spear
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_DISARM;
break;
case 0x406E: // Disc Mace
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_DISARM;
break;
case 0x4072: // Blood Blade
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x4074: // Dread Sword
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x4075: // Gargish Talwar
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x4076: // Shortblade
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x48AE: // Gargish Cleaver
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_INFECTING;
break;
case 0x48B0: // Gargish Battle Axe
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x48B2: // Gargish Axe
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x48B4: // Gargish Bardiche
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_DISMOUNT;
break;
case 0x48B6: // Gargish Butcher Knife
g_Ability[0] = AT_INFECTING;
g_Ability[1] = AT_DISARM;
break;
case 0x48B8: // Gargish Gnarled Staff
g_Ability[0] = AT_CONCUSSION_BLOW;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x48BA: // Gargish Katana
g_Ability[0] = AT_DOUBLE_STRIKE;
g_Ability[1] = AT_ARMOR_IGNORE;
break;
case 0x48BC: // Gargish Kryss
g_Ability[0] = AT_ARMOR_IGNORE;
g_Ability[1] = AT_INFECTING;
break;
case 0x48BE: // Gargish War Fork
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_DISARM;
break;
case 0x48CA: // Gargish Lance
g_Ability[0] = AT_DISMOUNT;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x48C0: // Gargish War Hammer
g_Ability[0] = AT_WHIRLWIND_ATTACK;
g_Ability[1] = AT_CRUSHING_BLOW;
break;
case 0x48C2: // Gargish Maul
g_Ability[0] = AT_CRUSHING_BLOW;
g_Ability[1] = AT_CONCUSSION_BLOW;
break;
case 0x48C4: // Gargish Scyte
g_Ability[0] = AT_BLEED_ATTACK;
g_Ability[1] = AT_PARALYZING_BLOW;
break;
case 0x48C6: // Gargish Bone Harvester
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_MORTAL_STRIKE;
break;
case 0x48C8: // Gargish Pike
g_Ability[0] = AT_PARALYZING_BLOW;
g_Ability[1] = AT_INFECTING;
break;
case 0x48CD: // Gargish Tessen
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_BLOCK;
break;
case 0x48CE: // Gargish Tekagi
g_Ability[0] = AT_DUAL_WIELD;
g_Ability[1] = AT_TALON_STRIKE;
break;
case 0x48D0: // Gargish Daisho
g_Ability[0] = AT_FEINT;
g_Ability[1] = AT_DOUBLE_STRIKE;
break;
default:
break;
}
if (g_Ability[0] != 0xFF)
break;
}
}
if (g_Ability[0] == 0xFF)
{
g_Ability[0] = AT_DISARM;
g_Ability[1] = AT_PARALYZING_BLOW;
}
g_GumpManager.UpdateContent(0, 0, GT_ABILITY);
g_GumpManager.UpdateContent(1, 0, GT_ABILITY);
g_GumpManager.UpdateContent(0, 0, GT_COMBAT_BOOK);
}
//--------------------------------------------------------------------------- |
AntJLopez/django_wedding | wedding/views.py | <gh_stars>0
from django.shortcuts import render, redirect, get_object_or_404 # noqa
from django.urls import reverse
from django.contrib.auth.decorators import login_required # noqa
from .local_settings import GOOGLE_API_KEY, STRIPE_PUBLIC_KEY
from guests.models import Guest, Activity
from guests.forms import RSVPForm
from payments.forms import GiftForm
def home(request, gift_form=None, rsvp_form=None):
    """Render the single-page wedding site.

    ``gift_form`` and ``rsvp_form`` may be passed in pre-bound (e.g. to
    redisplay validation errors); when omitted, fresh unbound forms are
    created per request.  They must NOT be instantiated in the signature's
    defaults: defaults are evaluated once at import time, so a single form
    instance would be shared (and mutated) across every request.
    """
    if gift_form is None:
        gift_form = GiftForm()
    if rsvp_form is None:
        rsvp_form = RSVPForm()

    sections = []
    sections.append({
        'template': 'wedding/sections/header.html'})
    sections.append({
        'title': 'Our Story',
        'template': 'wedding/sections/our_story.html'})
    sections.append({
        'title': 'The Proposal',
        'template': 'wedding/sections/proposal.html'})
    # sections.append({
    #     'title': 'Wedding Party',
    #     'template': 'wedding/sections/wedding_party.html'})
    # sections.append({
    #     'title': 'Family',
    #     'template': 'wedding/sections/family.html'})
    sections.append({
        'title': 'Schedule',
        'template': 'wedding/sections/schedule.html'})
    sections.append({
        'template': 'wedding/sections/map.html'})
    # sections.append({
    #     'title': 'Getting There',
    #     'template': 'wedding/sections/directions.html'})
    sections.append({
        'title': 'Accommodations',
        'template': 'wedding/sections/accommodations.html'})
    sections.append({
        'title': 'Gifts',
        'template': 'wedding/sections/gifts.html'})
    sections.append({
        'title': 'RSVP',
        'template': 'wedding/sections/rsvp.html'})

    params = {
        'sections': sections,
        'google_api_key': GOOGLE_API_KEY,
        'stripe_public_key': STRIPE_PUBLIC_KEY,
        'gift_form': gift_form,
        'rsvp_form': rsvp_form,
        'activities': Activity.objects.all(),
        'onsite_cost_url': reverse('onsite_cost'),
    }

    try:
        guest = Guest.objects.get(id=request.session['guest'])
        params['guest'] = guest
    except (KeyError, ValueError, Guest.DoesNotExist):
        # KeyError: no guest in the session; ValueError: malformed id;
        # DoesNotExist: stale session id for a deleted guest.
        pass

    return render(request, 'wedding/home.html', params)
def guest_login(request, guest_username):
    """Identify the visitor as the guest with ``guest_username``.

    Unknown usernames are silently ignored; either way the visitor is
    redirected to the home page.
    """
    try:
        request.session['guest'] = Guest.objects.get(username=guest_username).id
    except Guest.DoesNotExist:
        pass
    return redirect('home')
def guest_logout(request):
    """Forget the guest stored in the session and return to the home page."""
    # pop() with a default never raises, matching the old try/del/except.
    request.session.pop('guest', None)
    return redirect('home')
|
cemot/Attendance-via-Face-Recognition | Frontend/src/screens/LogInScreen/AdminLoginScreen.js | import React, { Component } from 'react';
import {
ScrollView,
View,
} from 'react-native';
import {Input, Header, Text, Button} from 'react-native-elements';
import Storage from '../../storage/Storage.js';
import {AppContext} from '../../../Contexts.js';
import {modes, makeCancelablePromise} from '../../../Constants.js';
/**
* UI Component to render the content for logging into the application as a faculty.
* This component applies to the faculties.
*/
export default class AdminLoginScreen extends Component {
/**
* Getting the current nearest context to get the data from.
* This context will have id and token of the faculty to authenticate him on the server
* along with other useful information.
*/
static contextType = AppContext;
constructor(props){
super(props);
this.state = {
Email: {
hasError: false,
errorMessage: '',
value: '',
},
Password: {
hasError: false,
errorMessage: '',
value: '',
},
hasError: false,
errorMessage: '',
isLoading: false,
};
// Array of all the async tasks(promises).
this.promises = [];
// Binding all the functions to current context so that they can be called
// from the context of other components as well.
this.validate = this.validate.bind(this);
this.onLoginPress = this.onLoginPress.bind(this);
}
componentWillUnmount(){
for (let prom of this.promises) {
// Cancelling any pending promises on unmount.
prom.cancel();
}
}
/**
* The function which is called before the faculty submits
* the login credentials to the server.
* Does Basic Validation of all the entered credentials.
*/
validate(){
let isValid = true;
if (this.state.Email.value.length === 0){
this.setState({
Email : {
hasError: true,
errorMessage: 'This field cannot be empty',
},
});
isValid = false;
}
if (this.state.Password.value.length === 0){
this.setState({
Password : {
hasError: true,
errorMessage: 'This field cannot be empty',
},
});
isValid = false;
}
return isValid;
}
/**
* The function which is called when the user submits the credentials to the server.
*/
onLoginPress(){
// Primary validation of Email and Password.
if (this.validate()){
this.setState({
isLoading: true,
}, () => {
let cancFetch = makeCancelablePromise(fetch(this.context.domain + '/api/auth/signin/faculty/', {
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
email: this.state.Email.value,
password: this.state.Password.value,
}),
method: 'POST',
}));
cancFetch.promise.then(async data => {
if (data.status === 200){
return data.json();
} else if (data.headers['Content-Type'] !== 'application/json'){
let err = new Error('Server uses unsupported data format');
err.isCanceled = false;
return Promise.reject(err);
}
let {error} = await data.json();
error.isCanceled = false;
return Promise.reject(error);
}).then(body=>{
let {id, token} = body;
this.setState({
isLoading: false,
}, () => {
this.context.changeAppState({
id, token,
isLoggedIn: true,
mode: modes.ADMIN,
});
});
Storage.setItem('admin:id', id).then(() => {
return Storage.setItem('admin:token', token);
}).then(() => {
console.log('admin id & token saved successfully!');
}).catch((err) => {
console.log('Failed to save admin id & token.\n Error: ' + err.message);
});
}).catch(err => {
if (!err.isCanceled){
this.setState({
hasError: true,
isLoading: false,
errorMessage: err.message,
});
}
});
this.promises.push(cancFetch);
});
}
}
render() {
return (
<ScrollView style={{padding: 10}}>
<Header
centerComponent = {{text: 'Faculty Log In', style: { color: '#fff', fontSize: 32, marginBottom: 20 }}}
/>
{this.state.hasError ?
<>
<Text style={{color: 'red'}}> {this.state.errorMessage} </Text>
</> :
<></>
}
<Input
placeholder="Email"
onChangeText={(Email) => this.setState({Email : { hasError: false, value: Email }})}
value={this.state.Email.value}
errorMessage={this.state.Email.hasError ? this.state.Email.errorMessage : undefined}
errorStyle={{color: 'red'}}
/>
<Input
placeholder="Password"
secureTextEntry={true}
onChangeText={(Password) => this.setState({Password : { hasError: false, value: Password }})}
value={this.state.Password.value}
errorMessage={this.state.Password.hasError ? this.state.Password.errorMessage : undefined}
errorStyle={{color: 'red'}}
/>
<View style={{margin: 7}} />
<Button
onPress={() => this.onLoginPress()}
title="Log In"
loading={this.state.isLoading}
disabled={this.state.isLoading}
type="outline"
containerStyle={{width: '80%' , marginLeft: '10%'}}
/>
<View style={styles.verticalRightLayout}>
<Text
style={styles.switchLoginMode}
onPress={() => this.context.changeAppState({
openAdminPages: false,
openSignUpPage: false,
})} >
Not a Faculty? Login As Student
</Text>
</View>
<View style={styles.verticalRightLayout}>
<Text
style={styles.switchLoginMode}
onPress={() => this.context.changeAppState({
openSignUpPage: true,
openAdminPages: false,
})}>
Or Register as Student
</Text>
</View>
</ScrollView>
);
}
}
// Presentation styles shared by the login screen components.
let styles = {
  // Blue, underlined, right-aligned text used for the "switch mode" links.
  switchLoginMode: {
    color: 'blue',
    fontSize: 16,
    textAlign: 'right',
    textDecorationStyle: 'solid',
    textDecorationLine: 'underline',
  },
  // Column container so the link text right-aligns on its own row.
  verticalRightLayout: { flexDirection: 'column' },
};
|
piesku/a-moment-lost-in-time | src/game.js | <reponame>piesku/a-moment-lost-in-time
import { Game } from "cervus/core";
import { Plane } from "cervus/shapes";
import { Move, Transform, Render } from "cervus/components";
import { basic } from "cervus/materials";
import { quat } from "cervus/math";
import { rgb_to_hex, hsl_to_rgb } from "cervus/utils";
import { SAVE_SNAPSHOT } from "./actions";
import { dispatch } from "./store";
import { element } from "./level-elements.js";
import * as random from "./random";
import { DummyLookAt, get_score, get_hint } from "./score";
import { distance } from "gl-matrix/src/gl-matrix/vec3";
import { spawn_birds } from "./bird";
const DEBUG = false;                 // enables free-fly keys and visible bird spawners
const LEVEL_SIZE = 1000;             // far clipping distance; props/targets live within this radius
const WORLD_SIZE = LEVEL_SIZE * 10;  // side length of the ground plane
const SATURATION = 0.7;              // fixed HSL saturation of the level palette
const LUMINANCE = 0.6;               // base HSL luminance, scaled by the proximity "hint"
const PLAYER_HEIGHT = 1.74;          // camera height above the floor (approx. eye level)
const BIRD_TRIGGER_DISTANCE = 25;    // player distance at which a spawner releases its flock
const BIRD_FLOCK_SIZE = 25;          // number of birds per flock

let props = [];            // all prop entities added to the current level
let birds_positions = [];  // positions of bird spawners that have not fired yet
// Convert a hue + luminance (fixed SATURATION) into a "#rrggbb" hex string.
function hex(hue, lum) {
  return rgb_to_hex(hsl_to_rgb(hue, SATURATION, lum));
}
// Build a level deterministically from its number: seed the RNG, create the
// game/camera, floor, props and bird spawners, then capture a snapshot of a
// randomly framed "target" view before handing control back.
// Returns [game, hue] where hue is the level's base HSL hue.
export function create_level(lvl_number) {
    // Same level number always reproduces the same level.
    random.set_seed(Math.pow(random.base_seed / lvl_number, 2));
    props = [];
    birds_positions = [];

    const game = new Game({
        width: window.innerWidth,
        height: window.innerHeight,
        clear_color: "#eeeeee",
        // Don't make this too high a number. Clipping makes the distant props
        // appear as if they were generated dynamically which is okay. It also
        // make the horizon more diverse with gaps of sky peeking between
        // buildings close by, even if there's actually a (clipped) prop in the
        // distance.
        far: LEVEL_SIZE
    });

    // Controls stay disabled until start_level enables them.
    game.camera.add_component(new Move({
        keyboard_controlled: false,
        mouse_controlled: false,
        move_speed: 30,
        rotate_speed: .2,
    }));

    const hue = random.float(0, 1);
    const color = hex(hue, LUMINANCE);

    const floor = new Plane();
    floor.get_component(Transform).scale = [WORLD_SIZE, 1, WORLD_SIZE];
    floor.get_component(Render).set({
        material: basic,
        color
    });
    game.add(floor);

    // Higher levels get quadratically more props.
    for (let i = 0; i < Math.pow(lvl_number, 2) + 1; i++) {
        element(i, color, random.integer(0, 2)).forEach((el) => {
            props.push(el);
            game.add(el);
        });
    }

    // Frame the target snapshot: random position, aim at a random prop,
    // then perturb the view direction.
    game.camera.get_component(Transform).position = random.position([0, 0], LEVEL_SIZE / 3);
    game.camera.get_component(Transform).look_at(
        random.element_of(props).get_component(Transform).position
    );
    game.camera.get_component(Transform).look_at(
        random.look_at_target(game.camera.get_component(Transform).matrix)
    );

    if (!DEBUG) {
        // Remove two movement bindings outside debug mode — presumably the
        // E/Q (keycodes 69/81) fly up/down keys; verify against cervus Move.
        delete game.camera.get_component(Move).dir_desc['69'];
        delete game.camera.get_component(Move).dir_desc['81'];
    }
    // game.camera.get_component(Move).dir_desc['81'] = 'l';
    // game.camera.get_component(Move).dir_desc['90'] = 'f';

    const spawners = random.integer(2, 4);
    for (let i = 0; i < spawners; i++) {
        const birds_position = random.position([0, 0], LEVEL_SIZE / 3, -3);
        birds_positions.push(birds_position);
        if (DEBUG) {
            // In debug mode make the spawner visible in the scene.
            const bird_spawner = element(1, color, 3)[0];
            bird_spawner.get_component(Transform).set({
                position: birds_position
            });
            game.add(bird_spawner);
        }
    }

    // One-shot hook: after the first rendered frame, store the snapshot and
    // camera pose as the level target, then stop until start_level is called.
    game.on("afterrender", function take_snapshot() {
        game.off("afterrender", take_snapshot);
        const target = {
            snapshot: game.canvas.toDataURL(),
            position: game.camera.get_component(Transform).position,
            rotation: game.camera.get_component(Transform).rotation,
        };
        dispatch(SAVE_SNAPSHOT, target);
        game.stop();
    });

    return [game, hue];
}
// Begin gameplay: reset the camera to the spawn point, enable controls,
// black out the scene, and wire up per-frame bird triggering plus the
// proximity-based recoloring toward the target view.
export function start_level(game, hue, target) {
    game.camera.get_component(Transform).set({
        position: [0, PLAYER_HEIGHT, 0],
        rotation: quat.create()
    });
    game.camera.get_component(Move).keyboard_controlled = true;
    game.camera.get_component(Move).mouse_controlled = true;
    game.camera.add_component(new DummyLookAt({target}));

    // Start fully dark; the afterrender hook below brightens entities as the
    // player's view approaches the target.
    for (const entity of game.entities) {
        entity.get_component(Render).color = "#000000";
    }

    game.start();

    game.on("tick", () => {
        for (let i = 0; i < birds_positions.length; i++) {
            if (distance(birds_positions[i], game.camera.get_component(Transform).position) < BIRD_TRIGGER_DISTANCE) {
                // Flock color encodes the current hint strength.
                spawn_birds(
                    birds_positions[i],
                    hex(hue, LUMINANCE * get_hint(target, game.camera, LEVEL_SIZE)),
                    LEVEL_SIZE / 5,
                    BIRD_FLOCK_SIZE,
                    game
                );
                // Each spawner fires once; at most one flock per tick.
                birds_positions.splice(i, 1);
                break;
            }
        }
    });

    game.on("afterrender", function () {
        const hint = get_hint(target, game.camera, LEVEL_SIZE);
        // XXX Change color on the material instance?
        for (const entity of game.entities) {
            entity.get_component(Render).color = hex(hue, LUMINANCE * hint);
        }
    });
}
// Stop the game loop and return the final score as an integer percentage.
export function end_level(game, target) {
    game.stop();
    return Math.floor(100 * get_score(target, game.camera, LEVEL_SIZE));
}
|
fangshun2004/processhacker | SystemInformer/mdump.c | <filename>SystemInformer/mdump.c
/*
* Copyright (c) 2022 Winsider Seminars & Solutions, Inc. All rights reserved.
*
* This file is part of System Informer.
*
* Authors:
*
* wj32 2010-2015
* dmex 2016-2022
*
*/
#include <phapp.h>
#include <apiimport.h>
#include <appresolver.h>
#include <dbghelp.h>
#include <symprv.h>
#include <actions.h>
#include <phsvccl.h>
#include <procprv.h>
// Private window messages sent by the dump worker thread to the progress UI.
#define WM_PH_MINIDUMP_STATUS_UPDATE (WM_APP + 301)
#define WM_PH_MINIDUMP_COMPLETED (WM_APP + 302)
#define WM_PH_MINIDUMP_ERROR (WM_APP + 303)

// Reference-counted state for one minidump operation. Cleanup happens in
// PhpProcessMiniDumpContextDeleteProcedure when the last reference is dropped.
typedef struct _PH_PROCESS_MINIDUMP_CONTEXT
{
    HWND WindowHandle;                    // progress dialog / task dialog window
    HWND ParentWindowHandle;
    HANDLE ProcessId;                     // target process id
    PPH_PROCESS_ITEM ProcessItem;         // referenced; released on delete
    PPH_STRING FileName;                  // destination .dmp path
    PPH_STRING ErrorMessage;              // last failure text, if any
    MINIDUMP_TYPE DumpType;
    WNDPROC DefaultTaskDialogWindowProc;  // original wndproc saved when subclassing
    HANDLE ProcessHandle;
    HANDLE FileHandle;
    union
    {
        BOOLEAN Flags;
        struct
        {
            BOOLEAN IsWow64 : 1;           // target is a 32-bit process on 64-bit Windows
            BOOLEAN IsProcessSnapshot : 1; // dumping a PSS snapshot instead of the live process
            BOOLEAN Stop : 1;              // user requested cancellation
            BOOLEAN Succeeded : 1;         // dump completed successfully
            BOOLEAN Spare : 4;
        };
    };
    ULONG64 LastTickCount;  // tick count of the last status-text update (idle refresh timer)
} PH_PROCESS_MINIDUMP_CONTEXT, *PPH_PROCESS_MINIDUMP_CONTEXT;

INT_PTR CALLBACK PhpProcessMiniDumpDlgProc(
    _In_ HWND hwndDlg,
    _In_ UINT uMsg,
    _In_ WPARAM wParam,
    _In_ LPARAM lParam
    );

NTSTATUS PhpProcessMiniDumpTaskDialogThread(
    _In_ PVOID ThreadParameter
    );

// Lazily-created object type backing PH_PROCESS_MINIDUMP_CONTEXT allocations.
PH_INITONCE PhpProcessMiniDumpContextTypeInitOnce = PH_INITONCE_INIT;
PPH_OBJECT_TYPE PhpProcessMiniDumpContextType = NULL;
// Shows a Save File dialog pre-filled with "<process>_<date>_<time>.dmp" and
// returns the chosen path (caller dereferences), or NULL if the user cancelled.
PPH_STRING PhpProcessMiniDumpGetFileName(
    _In_ HWND WindowHandle,
    _In_ PPH_PROCESS_ITEM ProcessItem
    )
{
    static PH_FILETYPE_FILTER filters[] =
    {
        { L"Dump files (*.dmp)", L"*.dmp" },
        { L"All files (*.*)", L"*.*" }
    };
    PPH_STRING fileName = NULL;
    PVOID fileDialog;
    LARGE_INTEGER time;
    SYSTEMTIME systemTime;
    PPH_STRING dateString;
    PPH_STRING timeString;
    PPH_STRING suggestedFileName;

    // Build the suggested name from local time so repeated dumps don't collide.
    PhQuerySystemTime(&time);
    PhLargeIntegerToLocalSystemTime(&systemTime, &time);

    dateString = PH_AUTO_T(PH_STRING, PhFormatDate(&systemTime, L"yyyy-MM-dd"));
    timeString = PH_AUTO_T(PH_STRING, PhFormatTime(&systemTime, L"HH-mm-ss"));
    suggestedFileName = PH_AUTO_T(PH_STRING, PhFormatString(
        L"%s_%s_%s.dmp",
        PhGetString(ProcessItem->ProcessName),
        PhGetString(dateString),
        PhGetString(timeString)
        ));

    if (fileDialog = PhCreateSaveFileDialog())
    {
        PhSetFileDialogFilter(fileDialog, filters, RTL_NUMBER_OF(filters));
        PhSetFileDialogFileName(fileDialog, PhGetString(suggestedFileName));
        PhSetFileDialogOptions(fileDialog, PH_FILEDIALOG_DONTADDTORECENT);

        if (PhShowFileDialog(WindowHandle, fileDialog))
        {
            fileName = PhGetFileDialogFileName(fileDialog);
        }

        PhFreeFileDialog(fileDialog);
    }

    return fileName;
}
// Object-type delete procedure: releases every resource owned by a
// PH_PROCESS_MINIDUMP_CONTEXT when its reference count reaches zero.
VOID PhpProcessMiniDumpContextDeleteProcedure(
    _In_ PVOID Object,
    _In_ ULONG Flags
    )
{
    PPH_PROCESS_MINIDUMP_CONTEXT context = Object;

    if (context->FileHandle)
        NtClose(context->FileHandle);
    if (context->ProcessHandle)
        NtClose(context->ProcessHandle);
    if (context->FileName)
        PhDereferenceObject(context->FileName);
    if (context->ErrorMessage)
        PhDereferenceObject(context->ErrorMessage);
    if (context->ProcessItem)
        PhDereferenceObject(context->ProcessItem);
}
// Allocates a zero-initialized, reference-counted minidump context.
// The backing object type is created once on first use (thread-safe init-once).
PPH_PROCESS_MINIDUMP_CONTEXT PhpCreateProcessMiniDumpContext(
    VOID
    )
{
    PPH_PROCESS_MINIDUMP_CONTEXT context;

    if (PhBeginInitOnce(&PhpProcessMiniDumpContextTypeInitOnce))
    {
        PhpProcessMiniDumpContextType = PhCreateObjectType(L"ProcessMiniDumpContextObjectType", 0, PhpProcessMiniDumpContextDeleteProcedure);
        PhEndInitOnce(&PhpProcessMiniDumpContextTypeInitOnce);
    }

    context = PhCreateObject(sizeof(PH_PROCESS_MINIDUMP_CONTEXT), PhpProcessMiniDumpContextType);
    memset(context, 0, sizeof(PH_PROCESS_MINIDUMP_CONTEXT));

    return context;
}
// UI entry point: asks the user for a dump path, opens the target process and
// the output file, then spawns the task-dialog thread that writes the dump.
// The context created here is released by the worker/dialog when finished.
VOID PhUiCreateDumpFileProcess(
    _In_ HWND WindowHandle,
    _In_ PPH_PROCESS_ITEM ProcessItem
    )
{
    NTSTATUS status;
    PPH_PROCESS_MINIDUMP_CONTEXT context;
    PPH_STRING fileName;
#ifdef _WIN64
    BOOLEAN isWow64 = FALSE;
#endif

    fileName = PhpProcessMiniDumpGetFileName(WindowHandle, ProcessItem);

    if (PhIsNullOrEmptyString(fileName))
        return;

    context = PhpCreateProcessMiniDumpContext();
    context->ParentWindowHandle = WindowHandle;
    context->ProcessId = ProcessItem->ProcessId;
    context->ProcessItem = PhReferenceObject(ProcessItem);
    context->FileName = fileName;  // context takes ownership

    // task manager uses these flags (wj32)
    if (WindowsVersion >= WINDOWS_10)
    {
        context->DumpType =
            MiniDumpWithFullMemory |
            MiniDumpWithHandleData |
            MiniDumpWithUnloadedModules |
            MiniDumpWithFullMemoryInfo |
            MiniDumpWithThreadInfo |
            MiniDumpIgnoreInaccessibleMemory |
            MiniDumpWithIptTrace;

        // Full access lets the richer dump type succeed; otherwise fall back
        // (note: the goto deliberately jumps into the else block below).
        if (!NT_SUCCESS(status = PhOpenProcess(
            &context->ProcessHandle,
            PROCESS_ALL_ACCESS,
            context->ProcessId
            )))
        {
            goto LimitedDump;
        }
    }
    else
    {
LimitedDump:
        // Reduced dump type that works with query/read access only.
        context->DumpType =
            MiniDumpWithFullMemory |
            MiniDumpWithHandleData |
            MiniDumpWithUnloadedModules |
            MiniDumpWithFullMemoryInfo |
            MiniDumpWithThreadInfo;

        if (!NT_SUCCESS(status = PhOpenProcess(
            &context->ProcessHandle,
            PROCESS_QUERY_INFORMATION | PROCESS_VM_READ,
            context->ProcessId
            )))
        {
            PhShowStatus(WindowHandle, L"Unable to open the process", status, 0);
            PhDereferenceObject(context);
            return;
        }
    }

#ifdef _WIN64
    // Remember bitness so the worker can delegate WOW64 targets to the 32-bit phsvc.
    PhGetProcessIsWow64(context->ProcessHandle, &isWow64);
    context->IsWow64 = !!isWow64;
#endif

    status = PhCreateFileWin32(
        &context->FileHandle,
        PhGetString(fileName),
        FILE_GENERIC_WRITE | DELETE,  // DELETE allows discarding the file on failure
        FILE_ATTRIBUTE_NORMAL,
        0,
        FILE_OVERWRITE_IF,
        FILE_NON_DIRECTORY_FILE | FILE_SYNCHRONOUS_IO_NONALERT
        );

    if (!NT_SUCCESS(status))
    {
        PhShowStatus(WindowHandle, L"Unable to access the dump file", status, 0);
        PhDereferenceObject(context);
        return;
    }

    PhCreateThread2(PhpProcessMiniDumpTaskDialogThread, context);

    //DialogBoxParam(
    //    PhInstanceHandle,
    //    MAKEINTRESOURCE(IDD_PROGRESS),
    //    NULL,
    //    PhpProcessMiniDumpDlgProc,
    //    (LPARAM)context
    //    );
}
// MiniDumpWriteDump callback: surfaces cancellation, tolerates unreadable
// memory, and posts human-readable progress text to the progress window.
static BOOL CALLBACK PhpProcessMiniDumpCallback(
    _Inout_ PVOID CallbackParam,
    _In_ PMINIDUMP_CALLBACK_INPUT CallbackInput,
    _Inout_ PMINIDUMP_CALLBACK_OUTPUT CallbackOutput
    )
{
    PPH_PROCESS_MINIDUMP_CONTEXT context = CallbackParam;
    PPH_STRING message = NULL;

    // Don't try to send status updates if we're creating a dump of the current process.
    if (context->ProcessId == NtCurrentProcessId())
        return TRUE;

    // MiniDumpWriteDump seems to get bored of calling the callback
    // after it begins dumping the process handles. The code is
    // still here in case they fix this problem in the future.
    switch (CallbackInput->CallbackType)
    {
    case CancelCallback:
        {
            if (context->Stop)
                CallbackOutput->Cancel = TRUE;

            // Keep asking so a later Stop request is still honored.
            CallbackOutput->CheckCancel = TRUE;
        }
        break;
    case IsProcessSnapshotCallback:
        {
            // S_FALSE tells dbghelp the handle refers to a PSS snapshot.
            if (context->IsProcessSnapshot)
                CallbackOutput->Status = S_FALSE;
        }
        break;
    //case VmStartCallback:
    //    {
    //        CallbackOutput->Status = S_FALSE;
    //    }
    //    break;
    //case IncludeVmRegionCallback:
    //    {
    //        CallbackOutput->Continue = TRUE;
    //    }
    //    break;
    case ReadMemoryFailureCallback:
        {
            // Ignore unreadable regions instead of failing the whole dump.
            CallbackOutput->Status = S_OK;
        }
        break;
    case ModuleCallback:
        {
            PH_FORMAT format[3];
            PPH_STRING baseName = NULL;

            if (CallbackInput->Module.FullPath)
            {
                if (baseName = PhCreateString(CallbackInput->Module.FullPath))
                {
                    // Shorten "C:\...\name.dll" to an ellipsized base name for display.
                    PhMoveReference(&baseName, PhGetBaseName(baseName));
                    PhMoveReference(&baseName, PhEllipsisStringPath(baseName, 10));
                }
            }

            // Processing module %s...
            PhInitFormatS(&format[0], L"Processing module ");
            if (baseName)
                PhInitFormatSR(&format[1], baseName->sr);
            else
                PhInitFormatS(&format[1], L"");
            PhInitFormatS(&format[2], L"...");
            message = PhFormat(format, RTL_NUMBER_OF(format), 0);

            PhClearReference(&baseName);
        }
        break;
    case ThreadCallback:
    case ThreadExCallback:
        {
            PH_FORMAT format[3];

            // Processing thread %lu...
            PhInitFormatS(&format[0], L"Processing thread ");
            PhInitFormatU(&format[1], CallbackInput->Thread.ThreadId);
            PhInitFormatS(&format[2], L"...");
            message = PhFormat(format, RTL_NUMBER_OF(format), 0);
        }
        break;
    case IncludeVmRegionCallback:
        {
            PH_FORMAT format[2];

            //CallbackOutput->Continue = TRUE;

            // Processing memory %lu...
            PhInitFormatS(&format[0], L"Processing memory regions");
            //PhInitFormatI64X(&format[1], CallbackOutput->VmRegion.BaseAddress);
            PhInitFormatS(&format[1], L"...");
            message = PhFormat(format, RTL_NUMBER_OF(format), 0);
        }
        break;
    }

    if (message)
    {
        // Synchronous send: the UI copies the text before we release it.
        SendMessage(context->WindowHandle, WM_PH_MINIDUMP_STATUS_UPDATE, 0, (LPARAM)message->Buffer);
        PhDereferenceObject(message);
    }

    return TRUE;
}
// Worker thread: writes the minidump. Delegates WOW64 targets to the 32-bit
// phsvc when available, prefers dumping via a PSS snapshot, and notifies the
// progress window of completion or failure. Consumes one context reference.
NTSTATUS PhpProcessMiniDumpThreadStart(
    _In_ PVOID Parameter
    )
{
    PPH_PROCESS_MINIDUMP_CONTEXT context = Parameter;
    MINIDUMP_CALLBACK_INFORMATION callbackInfo;
    HANDLE snapshotHandle = NULL;
    HANDLE packageTaskHandle = NULL;

    callbackInfo.CallbackRoutine = PhpProcessMiniDumpCallback;
    callbackInfo.CallbackParam = context;

#ifdef _WIN64
    if (context->IsWow64)
    {
        // A 64-bit dumper produces a broken dump of a 32-bit process, so ask
        // the 32-bit phsvc instance to write it instead.
        if (PhUiConnectToPhSvcEx(NULL, Wow64PhSvcMode, FALSE))
        {
            NTSTATUS status;

            if (NT_SUCCESS(status = PhSvcCallWriteMiniDumpProcess(
                context->ProcessHandle,
                context->ProcessId,
                context->FileHandle,
                context->DumpType
                )))
            {
                context->Succeeded = TRUE;
            }
            else
            {
                SendMessage(context->WindowHandle, WM_PH_MINIDUMP_ERROR, 0, (LPARAM)PhNtStatusToDosError(status));
            }

            PhUiDisconnectFromPhSvc();

            goto Completed;
        }
        else
        {
            if (PhShowMessage2(
                context->WindowHandle,
                TDCBF_YES_BUTTON | TDCBF_NO_BUTTON,
                TD_WARNING_ICON,
                L"The 32-bit version of System Informer could not be located.",
                L"A 64-bit dump will be created instead. Do you want to continue?"
                ) == IDNO)
            {
                goto Completed;
            }
        }
    }
#endif

    if (context->ProcessItem->PackageFullName)
    {
        // Set the task completion notification (based on taskmgr.exe) (dmex)
        //PhAppResolverPackageStopSessionRedirection(context->ProcessItem->PackageFullName);
        PhAppResolverBeginCrashDumpTaskByHandle(context->ProcessHandle, &packageTaskHandle);
    }

    // Prefer dumping a snapshot so the target isn't suspended while we write.
    if (NT_SUCCESS(PhCreateProcessSnapshot(
        &snapshotHandle,
        context->ProcessHandle,
        context->ProcessId
        )))
    {
        context->IsProcessSnapshot = TRUE;
    }

    if (PhWriteMiniDumpProcess(
        context->IsProcessSnapshot ? snapshotHandle : context->ProcessHandle,
        context->ProcessId,
        context->FileHandle,
        context->DumpType,
        NULL,
        NULL,
        &callbackInfo
        ))
    {
        context->Succeeded = TRUE;
    }
    else
    {
        SendMessage(context->WindowHandle, WM_PH_MINIDUMP_ERROR, 0, (LPARAM)GetLastError());
    }

    if (snapshotHandle)
    {
        PhFreeProcessSnapshot(snapshotHandle, context->ProcessHandle);
    }

    if (packageTaskHandle)
    {
        PhAppResolverEndCrashDumpTask(packageTaskHandle);
    }

#ifdef _WIN64
Completed:
#endif
    if (context->Succeeded)
    {
        SendMessage(context->WindowHandle, WM_PH_MINIDUMP_COMPLETED, 0, 0);
    }
    else
    {
        // Discard the partial file (opened with DELETE access for this purpose).
        PhDeleteFile(context->FileHandle);
    }

    PhDereferenceObject(context);

    return STATUS_SUCCESS;
}
// Classic-dialog progress UI (currently unused; the task-dialog path is
// active — see PhUiCreateDumpFileProcess). Starts the worker thread and
// reflects its WM_PH_MINIDUMP_* messages in the dialog.
INT_PTR CALLBACK PhpProcessMiniDumpDlgProc(
    _In_ HWND hwndDlg,
    _In_ UINT uMsg,
    _In_ WPARAM wParam,
    _In_ LPARAM lParam
    )
{
    PPH_PROCESS_MINIDUMP_CONTEXT context;

    if (uMsg == WM_INITDIALOG)
    {
        context = (PPH_PROCESS_MINIDUMP_CONTEXT)lParam;
        PhSetWindowContext(hwndDlg, PH_WINDOW_CONTEXT_DEFAULT, context);
    }
    else
    {
        context = PhGetWindowContext(hwndDlg, PH_WINDOW_CONTEXT_DEFAULT);
    }

    if (!context)
        return FALSE;

    switch (uMsg)
    {
    case WM_INITDIALOG:
        {
            context->WindowHandle = hwndDlg;

            PhSetApplicationWindowIcon(hwndDlg);
            PhCenterWindow(hwndDlg, context->ParentWindowHandle);
            PhSetWindowText(hwndDlg, L"Creating the dump file...");
            PhSetDialogItemText(hwndDlg, IDC_PROGRESSTEXT, L"Creating the dump file...");

            // Indeterminate (marquee) progress bar while the dump runs.
            PhSetWindowStyle(GetDlgItem(hwndDlg, IDC_PROGRESS), PBS_MARQUEE, PBS_MARQUEE);
            SendMessage(GetDlgItem(hwndDlg, IDC_PROGRESS), PBM_SETMARQUEE, TRUE, 75);

            // Extra reference for the worker thread; released in its exit path.
            PhReferenceObject(context);
            PhCreateThread2(PhpProcessMiniDumpThreadStart, context);

            SetTimer(hwndDlg, 1, 500, NULL);
        }
        break;
    case WM_DESTROY:
        {
            KillTimer(hwndDlg, 1);

            PhRemoveWindowContext(hwndDlg, PH_WINDOW_CONTEXT_DEFAULT);
            PhDereferenceObject(context);
        }
        break;
    case WM_COMMAND:
        {
            switch (GET_WM_COMMAND_ID(wParam, lParam))
            {
            case IDCANCEL:
                {
                    // Disable the button and request cooperative cancellation.
                    EnableWindow(GetDlgItem(hwndDlg, IDCANCEL), FALSE);
                    context->Stop = TRUE;
                }
                break;
            }
        }
        break;
    case WM_TIMER:
        {
            if (wParam == 1)
            {
                ULONG64 currentTickCount;

                currentTickCount = NtGetTickCount64();

                if (currentTickCount - context->LastTickCount >= 2000)
                {
                    // No status message update for 2 seconds.
                    PhSetDialogItemText(hwndDlg, IDC_PROGRESSTEXT, L"Creating the dump file...");
                    context->LastTickCount = currentTickCount;
                }
            }
        }
        break;
    case WM_PH_MINIDUMP_STATUS_UPDATE:
        PhSetDialogItemText(hwndDlg, IDC_PROGRESSTEXT, (PWSTR)lParam);
        context->LastTickCount = NtGetTickCount64();
        break;
    case WM_PH_MINIDUMP_ERROR:
        PhShowStatus(hwndDlg, L"Unable to create the minidump", 0, (ULONG)lParam);
        break;
    case WM_PH_MINIDUMP_COMPLETED:
        EndDialog(hwndDlg, IDOK);
        break;
    case WM_CTLCOLORBTN:
        return HANDLE_WM_CTLCOLORBTN(hwndDlg, wParam, lParam, PhWindowThemeControlColor);
    case WM_CTLCOLORDLG:
        return HANDLE_WM_CTLCOLORDLG(hwndDlg, wParam, lParam, PhWindowThemeControlColor);
    case WM_CTLCOLORSTATIC:
        return HANDLE_WM_CTLCOLORSTATIC(hwndDlg, wParam, lParam, PhWindowThemeControlColor);
    }

    return FALSE;
}
// No-op task-dialog callback used by the error page navigated to in
// PhpProcessMiniDumpTaskDialogSubclassProc; accepts all notifications.
HRESULT CALLBACK PhpProcessMiniDumpErrorPageCallbackProc(
    _In_ HWND hwndDlg,
    _In_ UINT uMsg,
    _In_ WPARAM wParam,
    _In_ LPARAM lParam,
    _In_ LONG_PTR dwRefData
    )
{
    return S_OK;
}
// Window subclass for the progress task dialog: translates the worker's
// WM_PH_MINIDUMP_* messages into task-dialog updates, and navigates to an
// error page on failure.
LRESULT CALLBACK PhpProcessMiniDumpTaskDialogSubclassProc(
    _In_ HWND hwndDlg,
    _In_ UINT uMsg,
    _In_ WPARAM wParam,
    _In_ LPARAM lParam
    )
{
    PPH_PROCESS_MINIDUMP_CONTEXT context;
    WNDPROC oldWndProc;

    if (!(context = PhGetWindowContext(hwndDlg, 0xF)))
        return 0;

    oldWndProc = context->DefaultTaskDialogWindowProc;

    switch (uMsg)
    {
    case WM_DESTROY:
        {
            // Restore the original wndproc before the window goes away.
            SetWindowLongPtr(hwndDlg, GWLP_WNDPROC, (LONG_PTR)oldWndProc);
            PhRemoveWindowContext(hwndDlg, 0xF);
        }
        break;
    case WM_PH_MINIDUMP_STATUS_UPDATE:
        {
            SendMessage(hwndDlg, TDM_SET_ELEMENT_TEXT, TDE_CONTENT, lParam);
            context->LastTickCount = NtGetTickCount64();
        }
        break;
    case WM_PH_MINIDUMP_ERROR:
        {
            TASKDIALOGCONFIG config;
            PPH_STRING statusMessage;

            // A failure caused by the user cancelling just closes the dialog.
            if (context->Stop)
            {
                SendMessage(hwndDlg, TDM_CLICK_BUTTON, IDOK, 0);
                break;
            }

            // lParam carries a Win32 error code; format it for display.
            if (statusMessage = PhGetStatusMessage(0, (ULONG)lParam))
            {
                PhMoveReference(&context->ErrorMessage, statusMessage);
            }

            memset(&config, 0, sizeof(TASKDIALOGCONFIG));
            config.cbSize = sizeof(TASKDIALOGCONFIG);
            config.dwFlags = TDF_USE_HICON_MAIN | TDF_ALLOW_DIALOG_CANCELLATION | TDF_CAN_BE_MINIMIZED;
            config.hMainIcon = PhGetApplicationIcon(FALSE);
            config.dwCommonButtons = TDCBF_CLOSE_BUTTON;
            config.pfCallback = PhpProcessMiniDumpErrorPageCallbackProc;
            config.lpCallbackData = (LONG_PTR)context;
            config.pszWindowTitle = PhApplicationName;
            config.pszMainInstruction = L"Unable to create the minidump.";
            config.pszContent = PhGetStringOrDefault(context->ErrorMessage, L"Unknown error.");

            SendMessage(context->WindowHandle, TDM_NAVIGATE_PAGE, 0, (LPARAM)&config);
        }
        break;
    case WM_PH_MINIDUMP_COMPLETED:
        SendMessage(hwndDlg, TDM_CLICK_BUTTON, IDOK, 0);
        break;
    }

    return CallWindowProc(oldWndProc, hwndDlg, uMsg, wParam, lParam);
}
// Task-dialog callback for the progress page: on creation it subclasses the
// window, starts the marquee and the worker thread; handles the timer and
// turns Cancel clicks into a cooperative stop request.
HRESULT CALLBACK PhpProcessMiniDumpTaskDialogCallbackProc(
    _In_ HWND hwndDlg,
    _In_ UINT uMsg,
    _In_ WPARAM wParam,
    _In_ LPARAM lParam,
    _In_ LONG_PTR dwRefData
    )
{
    PPH_PROCESS_MINIDUMP_CONTEXT context = (PPH_PROCESS_MINIDUMP_CONTEXT)dwRefData;

    switch (uMsg)
    {
    case TDN_CREATED:
        {
            context->WindowHandle = hwndDlg;

            PhSetApplicationWindowIcon(hwndDlg);
            PhCenterWindow(hwndDlg, context->ParentWindowHandle);

            // Subclass so the worker's private messages reach
            // PhpProcessMiniDumpTaskDialogSubclassProc.
            context->DefaultTaskDialogWindowProc = (WNDPROC)GetWindowLongPtr(hwndDlg, GWLP_WNDPROC);
            PhSetWindowContext(hwndDlg, 0xF, context);
            SetWindowLongPtr(hwndDlg, GWLP_WNDPROC, (LONG_PTR)PhpProcessMiniDumpTaskDialogSubclassProc);

            SendMessage(hwndDlg, TDM_SET_MARQUEE_PROGRESS_BAR, TRUE, 0);
            SendMessage(hwndDlg, TDM_SET_PROGRESS_BAR_MARQUEE, TRUE, 1);

            // Extra reference for the worker thread; released in its exit path.
            PhReferenceObject(context);
            PhCreateThread2(PhpProcessMiniDumpThreadStart, context);
        }
        break;
    case TDN_TIMER:
        {
            ULONG64 currentTickCount;

            currentTickCount = NtGetTickCount64();

            if (currentTickCount - context->LastTickCount >= 2000)
            {
                // No status message update for 2 seconds.
                //SendMessage(hwndDlg, TDM_SET_ELEMENT_TEXT, TDE_CONTENT, (LPARAM)L"Creating the minidump file...");
                context->LastTickCount = currentTickCount;
            }
        }
        break;
    case TDN_BUTTON_CLICKED:
        {
            ULONG buttonId = (ULONG)wParam;

            if (buttonId == IDCANCEL)
            {
                // S_FALSE keeps the dialog open while the worker winds down.
                context->Stop = TRUE;
                SendMessage(hwndDlg, TDM_SET_ELEMENT_TEXT, TDE_CONTENT, (LPARAM)L"Cancelling...");
                return S_FALSE;
            }
        }
        break;
    }

    return S_OK;
}
// Dedicated thread that owns the modal progress task dialog; blocks in
// TaskDialogIndirect until the dump completes, fails, or is cancelled.
// Consumes the caller's context reference.
NTSTATUS PhpProcessMiniDumpTaskDialogThread(
    _In_ PVOID ThreadParameter
    )
{
    PPH_PROCESS_MINIDUMP_CONTEXT context = (PPH_PROCESS_MINIDUMP_CONTEXT)ThreadParameter;
    TASKDIALOGCONFIG config;

    memset(&config, 0, sizeof(TASKDIALOGCONFIG));
    config.cbSize = sizeof(TASKDIALOGCONFIG);
    config.dwFlags = TDF_USE_HICON_MAIN | TDF_ALLOW_DIALOG_CANCELLATION | TDF_SHOW_MARQUEE_PROGRESS_BAR | TDF_CALLBACK_TIMER | TDF_CAN_BE_MINIMIZED;
    config.hMainIcon = PhGetApplicationIcon(FALSE);
    config.dwCommonButtons = TDCBF_CANCEL_BUTTON;
    config.pfCallback = PhpProcessMiniDumpTaskDialogCallbackProc;
    config.lpCallbackData = (LONG_PTR)context;
    config.pszWindowTitle = PhApplicationName;
    config.pszMainInstruction = L"Creating the minidump file...";
    config.pszContent = L"Creating the minidump file...";
    config.cxWidth = 200;

    TaskDialogIndirect(&config, NULL, NULL, NULL);

    PhDereferenceObject(context);

    return STATUS_SUCCESS;
}
|
huangyingw/fastai_fastai | nbs/44_tutorial.tabular.py | # ---
# jupyter:
# jupytext:
# formats: ipynb,py
# split_at_heading: true
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.6.0
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# hide
# skip
from fastai.tabular.all import *
! [-e / content] & & pip install - Uqq fastai # upgrade fastai on colab
# # Tabular training
#
# > How to use the tabular application in fastai

# To illustrate the tabular application, we will use the example of the [Adult dataset](https://archive.ics.uci.edu/ml/datasets/Adult) where we have to predict if a person is earning more or less than $50k per year using some general data.

# We can download a sample of this dataset with the usual `untar_data` command:

path = untar_data(URLs.ADULT_SAMPLE)
path.ls()

# Then we can have a look at how the data is structured:

df = pd.read_csv(path / 'adult.csv')
df.head()

# Some of the columns are continuous (like age) and we will treat them as float numbers we can feed our model directly. Others are categorical (like workclass or education) and we will convert them to a unique index that we will feed to embedding layers. We can specify our categorical and continuous column names, as well as the name of the dependent variable in `TabularDataLoaders` factory methods:

dls = TabularDataLoaders.from_csv(path / 'adult.csv', path=path, y_names="salary",
                                  cat_names=['workclass', 'education', 'marital-status', 'occupation', 'relationship', 'race'],
                                  cont_names=['age', 'fnlwgt', 'education-num'],
                                  procs=[Categorify, FillMissing, Normalize])

# The last part is the list of pre-processors we apply to our data:
#
# - `Categorify` is going to take every categorical variable and make a map from integer to unique categories, then replace the values by the corresponding index.
# - `FillMissing` will fill the missing values in the continuous variables by the median of existing values (you can choose a specific value if you prefer)
# - `Normalize` will normalize the continuous variables (subtract the mean and divide by the std)
#
#
# To further expose what's going on below the surface, let's rewrite this utilizing `fastai`'s `TabularPandas` class. We will need to make one adjustment, which is defining how we want to split our data. By default the factory method above used a random 80/20 split, so we will do the same:

splits = RandomSplitter(valid_pct=0.2)(range_of(df))

to = TabularPandas(df, procs=[Categorify, FillMissing, Normalize],
                   cat_names=['workclass', 'education', 'marital-status', 'occupation', 'relationship', 'race'],
                   cont_names=['age', 'fnlwgt', 'education-num'],
                   y_names='salary',
                   splits=splits)

# Before finally building our `DataLoaders` again:

dls = to.dataloaders(bs=64)

# > Later we will explore why using `TabularPandas` to preprocess will be valuable.

# The `show_batch` method works like for every other application:

dls.show_batch()

# We can define a model using the `tabular_learner` method. When we define our model, `fastai` will try to infer the loss function based on our `y_names` earlier.
#
# **Note**: Sometimes with tabular data, your `y`'s may be encoded (such as 0 and 1). In such a case you should explicitly pass `y_block = CategoryBlock` in your constructor so `fastai` won't presume you are doing regression.

learn = tabular_learner(dls, metrics=accuracy)

# And we can train that model with the `fit_one_cycle` method (the `fine_tune` method won't be useful here since we don't have a pretrained model).

learn.fit_one_cycle(1)

# We can then have a look at some predictions:

learn.show_results()

# Or use the predict method on a row:

row, clas, probs = learn.predict(df.iloc[0])

row.show()

clas, probs

# To get prediction on a new dataframe, you can use the `test_dl` method of the `DataLoaders`. That dataframe does not need to have the dependent variable in its column.

test_df = df.copy()
test_df.drop(['salary'], axis=1, inplace=True)
dl = learn.dls.test_dl(test_df)

# Then `Learner.get_preds` will give you the predictions:

learn.get_preds(dl=dl)

# ## `fastai` with Other Libraries
#
# As mentioned earlier, `TabularPandas` is a powerful and easy preprocessing tool for tabular data. Integration with libraries such as Random Forests and XGBoost requires only one extra step, that the `.dataloaders` call did for us. Let's look at our `to` again. Its values are stored in a `DataFrame` like object, where we can extract the `cats`, `conts,` `xs` and `ys` if we want to:

to.xs[:3]

# To then preprocess our data, all we need to do is call `process` to apply all of our `procs` inplace:

to.process()
to.xs[:3]

# Now that everything is encoded, you can then send this off to XGBoost or Random Forests by extracting the train and validation sets and their values:

X_train, y_train = to.train.xs, to.train.ys.values.ravel()
X_test, y_test = to.valid.xs, to.valid.ys.values.ravel()

# And now we can directly send this in!
|
AmrARaouf/algorithm-detection | graph-source-code/437-C/6771448.cpp | //Language: GNU C++
#include <cstring>
#include <cassert>
#include <vector>
#include <list>
#include <queue>
#include <map>
#include <set>
#include <deque>
#include <stack>
#include <bitset>
#include <algorithm>
#include <functional>
#include <numeric>
#include <utility>
#include <sstream>
#include <iostream>
#include <iomanip>
#include <cstdio>
#include <cmath>
#include <cstdlib>
#include <ctime>
#include <fstream>
#include <climits>
#define LL long long
#define MX 5000+7
#define MD 1000000007
#define N 1000007
#define check_bit(a,b) (a & (1<<b))
#define PB push_back
#define FOR(aa,nn) for(aa=0; aa<nn; aa++)
#define vi vector<int>
#define vll vector<long long>
using namespace std;
// Priority-queue entry: a = vertex id, b = vertex value (used as the primary
// ordering key), c = current removal cost accumulated for that vertex.
struct Z
{
    int a;
    int b;
    LL c;
    Z() {}
    Z(int w,int bb, LL in)
    {
        a=w;
        b = bb;
        c =in;
    }
};

// Ordering for std::priority_queue<Z> (pops the "largest" element first):
// vertices with larger value b are popped first; ties are broken so the entry
// with the smaller cost c comes out first.
bool operator<(Z A, Z B)
{
    if(A.b == B.b) return A.c>B.c;
    return A.b<B.b;
}

int arr[1001];   // arr[i] = value of vertex i (n is assumed <= 1000)
int cost[1001];  // cost[i] = sum of values of i's still-attached neighbours
// Greedy solution (appears to be Codeforces 437C "The Child and Toy"):
// repeatedly remove the vertex with the largest value; removing vertex x
// costs the sum of values of its still-attached neighbours, and every
// remaining neighbour's future cost drops by arr[x]. Stale queue entries are
// skipped lazily via the vis[] flags.
int main()
{
//	FILE * fin, * fout, *fp;
//	fp=fopen("out.txt", "w");
    int i,j;
    int a,b;
    int n,m,u,v;

    cin>>n>>m;

    vector<int>V[100001];
    for(int i =0 ; i<100001; i++)
        V[i].clear();

    for(i = 1; i <= n; i++)
        cin>>arr[i];

    // Read edges; cost[i] starts as the sum of all neighbour values.
    for(i = 0; i < m; i++)
    {
        cin>>u>>v;
        cost[u]+= arr[v];
        cost[v]+=arr[u];
        V[u].PB(v);
        V[v].PB(u);
    }

    priority_queue<Z>Q;
    for(int i = 1; i<= n; i++)
    {
        Q.push(Z(i,arr[i], cost[i]));
    }

    LL ans = 0;
    bool vis[100001];
    memset(vis, false, sizeof vis);

    while(!Q.empty())
    {
        Z top = Q.top();
        Q.pop();
        int x = top.a;
        // NOTE(review): c is narrowed LL -> int; safe only while per-vertex
        // costs stay within int range (holds for this problem's constraints).
        int c = top.c;
        if(vis[x]) continue;
        vis[x] = true;
        ans += c;
        //	cout<<x<<" "<<c<<endl;
        int sz = V[x].size();
        for(int i = 0; i<sz; i++)
        {
            int adjn = V[x][i];
            if(vis[adjn]) continue;
            // Removing x means its value no longer contributes to adjn's cost.
            int adjc = cost[adjn] - arr[x];
            cost[adjn]= min(cost[adjn], adjc);  // min() is redundant here (adjc < cost) but harmless
            Q.push(Z(adjn,arr[adjn], adjc));
        }
    }
    cout<<ans<<endl;
    return 0;
}
/*
7 3
5 5 5 5 5 5
1 3 4
4 5 2
6 3 1
*/
|
chetan/bixby-manager | lib/tasks/bixby/create_tenant.rake | <filename>lib/tasks/bixby/create_tenant.rake
namespace :bixby do

  desc "create a new tenant"
  task :create_tenant => :environment do |t|
    disable_logging!
    require 'highline'

    puts "Create tenant"
    puts

    h = HighLine.new

    # Prompt for both values. Ctrl-C at either prompt aborts cleanly instead
    # of dumping a stack trace (the original only guarded the name prompt).
    begin
      name = h.ask("Name: ")
      if name.blank?
        puts "name is required!"
        exit 1
      end

      # Echo '*' so the password is not shown in the terminal.
      pass = h.ask("Password: ") { |q| q.echo = "*" }
      if pass.blank?
        # Previously a blank password exited silently (via a non-idiomatic
        # `exit 1 if name.blank? or pass.blank?` that also re-checked name).
        puts "password is required!"
        exit 1
      end
    rescue Interrupt
      exit 1
    end

    Bixby::User.new.create_tenant(name, pass)
  end

end
|
phpc0de/idea-android | designer/testSrc/com/android/tools/idea/uibuilder/scene/SceneComplexBaselineConnectionTest.java | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.uibuilder.scene;
import com.android.tools.idea.common.fixtures.ModelBuilder;
import com.android.tools.idea.common.scene.target.AnchorTarget;
import com.android.tools.idea.uibuilder.handlers.constraint.targets.BaseLineToggleViewAction;
import org.jetbrains.annotations.NotNull;
import static com.android.SdkConstants.BUTTON;
import static com.android.SdkConstants.CONSTRAINT_LAYOUT;
/**
* Test complex baseline connection interactions
*/
/**
 * Verifies that dragging from one button's baseline anchor to another's
 * creates an {@code app:layout_constraintBaseline_toBaselineOf} constraint
 * and drops the now-redundant vertical constraint attributes.
 */
public class SceneComplexBaselineConnectionTest extends SceneTest {

  public void testConnectBaseline() {
    // Baseline anchors only become interactive after toggling the baseline action.
    myInteraction.select("button1", true);
    myInteraction.performViewAction("button1", target -> target instanceof BaseLineToggleViewAction);
    // Drag: button1 baseline -> button2 baseline.
    myInteraction.mouseDown("button1", AnchorTarget.Type.BASELINE);
    myInteraction.mouseRelease("button2", AnchorTarget.Type.BASELINE);
    // Expect the baseline constraint; the original top/bottom/bias attributes are gone.
    myScreen.get("@id/button1")
      .expectXml("<Button\n" +
                 "        android:id=\"@id/button1\"\n" +
                 "        android:layout_width=\"100dp\"\n" +
                 "        android:layout_height=\"50dp\"\n" +
                 "        android:text=\"Button\"\n" +
                 "        app:layout_constraintBaseline_toBaselineOf=\"@+id/button2\"\n" +
                 "        tools:layout_editor_absoluteX=\"56dp\" />");
  }

  /**
   * Builds the test scene: a ConstraintLayout with button1 (vertically
   * constrained to the parent) and button2 (absolutely positioned).
   */
  @Override
  @NotNull
  public ModelBuilder createModel() {
    ModelBuilder builder = model("constraint.xml",
                                 component(CONSTRAINT_LAYOUT.defaultName())
                                   .id("@id/root")
                                   .withBounds(0, 0, 1000, 1000)
                                   .width("1000dp")
                                   .height("1000dp")
                                   .children(
                                     component(BUTTON)
                                       .id("@id/button1")
                                       .withBounds(56, 295, 100, 50)
                                       .width("100dp")
                                       .height("50dp")
                                       .withAttribute("android:text", "Button")
                                       .withAttribute("tools:layout_editor_absoluteX", "56dp")
                                       .withAttribute("android:layout_marginBottom", "46dp")
                                       .withAttribute("app:layout_constraintBottom_toBottomOf", "parent")
                                       .withAttribute("app:layout_constraintTop_toTopOf", "parent")
                                       .withAttribute("android:layout_marginTop", "8dp")
                                       .withAttribute("app:layout_constraintVertical_bias", "0.704"),
                                     component(BUTTON)
                                       .id("@id/button2")
                                       .withBounds(250, 170, 100, 50)
                                       .width("100dp")
                                       .height("50dp")
                                       .withAttribute("tools:layout_editor_absoluteX", "250dp")
                                       .withAttribute("tools:layout_editor_absoluteY", "170dp")
                                   ));
    return builder;
  }
}
|
CaioRibeiroDev/NodeJs-do-Zero-a-Maestria | 8_NODE_E_MYSQL/5_preparando_query/node_modules/index.js | const express = require('express');
const exphbs = require('express-handlebars');
const mysql = require('mysql')
const app = express()
app.engine('handlebars', exphbs.engine());
app.set('view engine', 'handlebars');
app.use(express.static('public'));
app.get('/', (req, res) => {
res.render('home')
})
const conn = mysql.createConnection({
host: 'localhost',
user: 'root',
password: '',
database: 'nodemysql',
})
conn.connect(function(err) {
if(err) {
console.log(err)
}
console.log('conectou ao mysql')
app.listen(3000)
})
|
JCoetzee123/spira | spira/yevon/geometry/edges/edges_old.py | <reponame>JCoetzee123/spira
import numpy as np
from copy import deepcopy
from spira.yevon.gdsii.group import Group
from spira.core.transforms import *
from spira.yevon.gdsii.elem_list import ElementList
from spira.yevon.gdsii.polygon import Box, Polygon
from spira.yevon.geometry.coord import Coord
from spira.yevon.gdsii.base import __LayerElement__
from spira.core.parameters.descriptor import Parameter
from spira.yevon.process.process_layer import ProcessParameter
from spira.core.parameters.variables import *
from spira.yevon.process.physical_layer import PLayer
from spira.yevon.process import get_rule_deck
__all__ = ['Edge', 'EdgeEuclidean', 'EdgeSquare', 'EdgeSideExtend']
RDD = get_rule_deck()
class __EdgeElement__(Group):
    """ Base class for an edge element.

    Declares the parameters shared by all edge elements: the process the
    edge connects to, its width, and the ID of the polygon it belongs to.
    """
    process = ProcessParameter(doc='Process to which the edge connects.')
    width = NumberParameter(default=1, doc='The width of the edge.')
    pid = StringParameter(default='no_pid', doc='A unique polygon ID to which the edge connects.')
class Edge(__EdgeElement__):
    """ Edge elements are object that represents the edge
    of a polygonal shape.

    The edge is rendered as two boxes: one extending inwards into the
    shape and one extending outwards, each on a dedicated edge purpose
    layer of the edge's process.

    Example
    -------
    >>> edge = Edge()
    """

    inward_extend = NumberParameter(default=1, doc='The distance the edge extends inwards to the shape.')
    inside = Parameter(fdef_name='create_inside')
    outward_extend = NumberParameter(default=1, doc='The distance the edge extends outwards to the shape.')
    outside = Parameter(fdef_name='create_outside')

    def create_inside(self):
        # Return the child element whose layer purpose marks it as an
        # inside edge, or None if no such element exists.
        for e in self.elements:
            purposes = [RDD.PURPOSE.PORT.INSIDE_EDGE_ENABLED, RDD.PURPOSE.PORT.INSIDE_EDGE_DISABLED]
            if e.layer.purpose in purposes:
                return e
        return None

    def create_outside(self):
        # Same as create_inside, but for the outside-edge purposes.
        for e in self.elements:
            purposes = [RDD.PURPOSE.PORT.OUTSIDE_EDGE_ENABLED, RDD.PURPOSE.PORT.OUTSIDE_EDGE_DISABLED]
            if e.layer.purpose in purposes:
                return e
        return None

    def create_elements(self, elems):
        # Box extending into the shape (centered inward_extend/2 above y=0).
        layer = PLayer(process=self.process, purpose=RDD.PURPOSE.PORT.INSIDE_EDGE_DISABLED)
        elems += Box(alias='InsideEdge',
            width=self.width,
            height=self.inward_extend,
            center=Coord(0, self.inward_extend/2),
            layer=layer)
        # Box extending away from the shape (centered outward_extend/2 below y=0).
        layer = PLayer(process=self.process, purpose=RDD.PURPOSE.PORT.OUTSIDE_EDGE_DISABLED)
        elems += Box(alias='OutsideEdge',
            width=self.width,
            height=self.outward_extend,
            center=Coord(0, -self.outward_extend/2),
            layer=layer)
        return elems
def EdgeEuclidean(radius=1.0):
    """Placeholder for a euclidean (rounded) edge profile; not implemented.

    :param radius: corner radius of the profile (currently unused).
    """
    pass


def EdgeSquare():
    """Placeholder for a square edge profile; not implemented."""
    pass


def EdgeSideExtend(side_extend=0.0):
    """Placeholder for a side-extended edge profile; not implemented.

    :param side_extend: sideways extension of the edge (currently unused).
    """
    pass
def generate_edges(shape, layer):
    """Generate an :class:`Edge` element for every segment of ``shape``.

    :param shape: shape providing ``x_coords``/``y_coords`` outline points.
    :param layer: drawing layer of the shape; mapped through
        ``RDD.GDSII.IMPORT_LAYER_MAP`` to obtain the physical layer.
    :returns: an :class:`ElementList` with one transformed ``Edge`` per segment.
    """
    xpts = list(shape.x_coords)
    ypts = list(shape.y_coords)
    n = len(xpts)
    # Close the outline so segment i runs from point i to point i+1.
    xpts.append(xpts[0])
    ypts.append(ypts[0])

    # Shoelace-style accumulator: its sign encodes the winding direction.
    clockwise = 0
    for i in range(0, n):
        clockwise += ((xpts[i+1] - xpts[i]) * (ypts[i+1] + ypts[i]))

    # Map the layer ONCE, outside the loop. The original re-assigned ``layer``
    # inside the loop, so from the second segment onwards an already-mapped
    # layer was looked up in IMPORT_LAYER_MAP again. The extend distances are
    # loop-invariant, so they are hoisted as well. (Unused ``name``/``bbox``
    # locals from the original have been dropped.)
    layer = RDD.GDSII.IMPORT_LAYER_MAP[layer]
    inward_extend = RDD[layer.process.symbol].MIN_SIZE / 2
    outward_extend = RDD[layer.process.symbol].MIN_SIZE / 2

    edges = ElementList()
    for i in range(0, n):
        x = np.sign(clockwise) * (xpts[i+1] - xpts[i])
        y = np.sign(clockwise) * (ypts[i] - ypts[i+1])
        # np.degrees replaces the original ``constants.RAD2DEG``, which was
        # never imported in this module and raised a NameError at runtime.
        orientation = np.degrees(np.arctan2(x, y))
        midpoint = [(xpts[i+1] + xpts[i])/2, (ypts[i+1] + ypts[i])/2]
        width = np.abs(np.sqrt((xpts[i+1] - xpts[i])**2 + (ypts[i+1]-ypts[i])**2))
        edge = Edge(width=width,
                    inward_extend=inward_extend,
                    outward_extend=outward_extend,
                    process=layer.process)
        # Rotate the edge onto the segment, then move it to the midpoint.
        T = Rotation(orientation + 90) + Translation(midpoint)
        edge.transform(T)
        edges += edge
    return edges
# from spira.yevon.geometry.ports.port import Port
# from spira.yevon.process.gdsii_layer import Layer
# def shape_edge_ports(shape, layer, local_pid='None', center=(0,0), loc_name=''):
# edges = PortList()
# xpts = list(shape.x_coords)
# ypts = list(shape.y_coords)
# n = len(xpts)
# xpts.append(xpts[0])
# ypts.append(ypts[0])
# clockwise = 0
# for i in range(0, n):
# clockwise += ((xpts[i+1] - xpts[i]) * (ypts[i+1] + ypts[i]))
# if layer.name == 'BBOX': bbox = True
# else: bbox = False
# layer = RDD.GDSII.IMPORT_LAYER_MAP[layer]
# for i in range(0, n):
# # name = 'E{}_{}'.format(i, layer.process.symbol)
# # name = 'E{}_{}_{}'.format(i, layer.process.symbol, shape.bbox_info.center)
# name = '{}E{}_{}'.format(loc_name, i, layer.process.symbol)
# x = np.sign(clockwise) * (xpts[i+1] - xpts[i])
# y = np.sign(clockwise) * (ypts[i] - ypts[i+1])
# orientation = (np.arctan2(x, y) * constants.RAD2DEG)
# midpoint = [(xpts[i+1] + xpts[i])/2, (ypts[i+1] + ypts[i])/2]
# width = np.abs(np.sqrt((xpts[i+1] - xpts[i])**2 + (ypts[i+1]-ypts[i])**2))
# P = Port(
# name=name,
# process=layer.process,
# purpose=RDD.PURPOSE.PORT.OUTSIDE_EDGE_DISABLED,
# midpoint=midpoint,
# orientation=orientation,
# width=width,
# length=0.2,
# local_pid=local_pid
# )
# edges += P
# return edges
|
apache/juneau | juneau-core/juneau-marshall/src/main/java/org/apache/juneau/json/SimpleJsonParser.java | <reponame>apache/juneau<filename>juneau-core/juneau-marshall/src/main/java/org/apache/juneau/json/SimpleJsonParser.java
// ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.json;
/**
* Parses any valid JSON text into a POJO model.
*
* <h5 class='topic'>Media types</h5>
*
* Handles <c>Content-Type</c> types: <bc>application/json+simple, text/json+simple</bc>
*
* <h5 class='topic'>Description</h5>
*
* Identical to {@link JsonParser} but with the media type <bc>application/json+simple</bc>.
*/
public class SimpleJsonParser extends JsonParser {

	//-------------------------------------------------------------------------------------------------------------------
	// Static
	//-------------------------------------------------------------------------------------------------------------------

	/** Default parser, Accept=application/json+simple. */
	public static final SimpleJsonParser DEFAULT = new SimpleJsonParser(create());

	/**
	 * Creates a new builder for this object.
	 *
	 * <p>The builder is preconfigured to consume the "+simple" media types in
	 * addition to the plain JSON ones.
	 *
	 * @return A new builder.
	 */
	public static JsonParser.Builder create() {
		return JsonParser.create().consumes("application/json+simple,text/json+simple,application/json,text/json");
	}

	//-------------------------------------------------------------------------------------------------------------------
	// Instance
	//-------------------------------------------------------------------------------------------------------------------

	/**
	 * Constructor.
	 *
	 * @param builder The builder for this object.
	 */
	protected SimpleJsonParser(JsonParser.Builder builder) {
		super(builder);
	}

	/**
	 * Returns a builder initialized from this parser's settings.
	 *
	 * @return A new builder copy.
	 */
	@Override /* Context */
	public JsonParser.Builder copy() {
		return new JsonParser.Builder(this);
	}
}
capnslipp/clutterapp | app/helpers/stylesheets_helper.rb | <reponame>capnslipp/clutterapp<filename>app/helpers/stylesheets_helper.rb
require 'const_reader'
# Stylesheet helpers: palette constants plus small methods deriving the
# borders, shadows and colors used by the app's generated CSS.
module StylesheetsHelper
  include Colorist
  include ConstReader

  # Whether debug styling is switched on in the app configuration.
  def debug?
    App::Config.debug_style
  end

  # outline-radius is not supported by WebKit; otherwise, we'd use it as well
  def border_radius_decl(size = '4px')
    %w[border-radius -moz-border-radius -webkit-border-radius].map { |prop| "#{prop}: #{size};" }.join(' ')
  end

  BG_COLOR = Color.new(0xf6f6f6)

  def bg_color
    themed(BG_COLOR)
  end

  def bright_bg_color
    bg_color.text_color.invert
  end

  def text_color
    bg_color.text_color
  end

  ACCENT_COLOR = Color.new(0xcc3300)

  def accent_color
    themed(ACCENT_COLOR)
  end

  WASH_COLOR = Color.new(0x444444)

  def wash_color
    themed(WASH_COLOR)
  end

  def wash_text_color
    wash_color.text_color
  end

  GENERIC_BORDER_COLOR = Color.new(0x000000, 0.2)

  # Border color with its alpha scaled by +m+.
  def generic_border_color(m = 1.0)
    base = themed(GENERIC_BORDER_COLOR)
    base.with(a: base.a * m)
  end

  BORDER_WIDTH = 1

  def border_width
    BORDER_WIDTH
  end

  FOCUS_COLOR = ACCENT_COLOR

  def focus_color
    themed(FOCUS_COLOR)
  end

  def focus_back
    focus_color.with(a: 0.25).to_s(:css_rgba)
  end

  SHADOW_COLOR = Color.new(0x000000, 0.75)

  # Accepts either a Color, a color string, or a numeric alpha multiplier.
  def shadow_color(alpha_multiplier_or_color = 1.0)
    case alpha_multiplier_or_color
    when Color
      alpha_multiplier_or_color
    when String
      alpha_multiplier_or_color.to_color
    else
      base = themed(SHADOW_COLOR)
      Color.from_rgba(base.r, base.g, base.b, base.a * alpha_multiplier_or_color)
    end
  end

  def deep_shadow(alpha_multiplier_or_color = 1.0)
    "0px 2px 6px #{shadow_color(alpha_multiplier_or_color).to_s(:css_rgba)}"
  end

  # Numeric multipliers are halved so shallow shadows come out fainter.
  def shallow_shadow(alpha_multiplier_or_color = 1.0)
    color_like = alpha_multiplier_or_color.is_a?(String) || alpha_multiplier_or_color.is_a?(Color)
    alpha_multiplier_or_color *= 0.5 unless color_like
    "0px 1px 3px #{shadow_color(alpha_multiplier_or_color).to_s(:css_rgba)}"
  end

  def deep_shadow_decl(alpha_multiplier_or_color = 1.0)
    %w[box-shadow -moz-box-shadow -webkit-box-shadow].map { |prop| "#{prop}: #{deep_shadow(alpha_multiplier_or_color)};" }.join(' ')
  end

  def shallow_shadow_decl(alpha_multiplier_or_color = 1.0)
    %w[box-shadow -moz-box-shadow -webkit-box-shadow].map { |prop| "#{prop}: #{shallow_shadow(alpha_multiplier_or_color)};" }.join(' ')
  end

  FILL_COLOR = Color.new(0xeeeeee, 0.75)

  def fill_color
    themed(FILL_COLOR)
  end

  DIVIDER_COLOR = Color.new(0xcccccc)

  def divider_color
    themed(DIVIDER_COLOR)
  end

  WIDGET_COLOR = Color.new(0xdddddd)

  def widget_color
    themed(WIDGET_COLOR)
  end

  def active_widget_color
    delta = Color.new(0x333333)
    inverted? ? widget_color + delta : widget_color - delta
  end

  CSS_DIRECTION_QUARTET = [:top, :right, :bottom, :left]
  const_reader :css_direction_quartet

  protected

  # Hard-wired: the non-inverted palette is always used.
  def inverted?
    false
  end

  # Applies the inverted-palette switch to a base color.
  def themed(color)
    inverted? ? color.invert : color
  end
end
|
Sherlock92/greentop | src/sport/PriceSize.cpp | /**
* Copyright 2017 <NAME>. Distributed under the MIT license.
*/
#include "greentop/sport/PriceSize.h"
namespace greentop {
namespace sport {

// Default constructor: price and size remain invalid Optionals.
PriceSize::PriceSize() {
}

// Value constructor.
PriceSize::PriceSize(const Optional<double>& price,
    const Optional<double>& size) :
    price(price),
    size(size) {
}

// Populates price/size from a JSON object; absent members are left untouched.
void PriceSize::fromJson(const Json::Value& json) {
    if (json.isMember("price")) {
        price = json["price"].asDouble();
    }
    if (json.isMember("size")) {
        size = json["size"].asDouble();
    }
}

// Serializes to a JSON object; only valid members are emitted.
Json::Value PriceSize::toJson() const {
    Json::Value json(Json::objectValue);
    if (price.isValid()) {
        json["price"] = price.toJson();
    }
    if (size.isValid()) {
        json["size"] = size.toJson();
    }
    return json;
}

// Valid only when both price and size are present.
bool PriceSize::isValid() const {
    return price.isValid() && size.isValid();
}

const Optional<double>& PriceSize::getPrice() const {
    return price;
}
void PriceSize::setPrice(const Optional<double>& price) {
    this->price = price;
}

const Optional<double>& PriceSize::getSize() const {
    return size;
}
void PriceSize::setSize(const Optional<double>& size) {
    this->size = size;
}

}
}
|
china-liweihong/Lealone2 | lealone-common/src/main/java/org/lealone/storage/replication/ReplicationSession.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lealone.storage.replication;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.lealone.db.session.DelegatedSession;
import org.lealone.db.session.Session;
import org.lealone.sql.SQLCommand;
import org.lealone.storage.StorageCommand;
import org.lealone.transaction.Transaction;
/**
 * A session that fans every operation out to a fixed set of replica
 * {@link Session}s. Read/write quorum sizes (r/w) are derived from the
 * configured {@link ConsistencyLevel}; most lifecycle calls are simply
 * broadcast to every underlying session.
 */
public class ReplicationSession extends DelegatedSession {

    private final Session[] sessions;
    private final String[] servers;
    private final String serversStr;
    private final String replicationNamePrefix;
    private final AtomicInteger counter = new AtomicInteger(1);
    private ConsistencyLevel consistencyLevel;

    final int n; // total number of nodes in the replication cluster

    // These two fields are recomputed whenever the ConsistencyLevel changes.
    int r; // minimum number of nodes that must succeed for a read
    int w; // minimum number of nodes that must succeed for a write

    int maxTries = 5;
    long rpcTimeoutMillis;

    public ReplicationSession(Session[] sessions) {
        this(sessions, null);
    }

    public ReplicationSession(Session[] sessions, List<String> initReplicationNodes) {
        // Delegate to the first replica by default.
        super(sessions[0]);
        this.sessions = sessions;
        this.rpcTimeoutMillis = sessions[0].getNetworkTimeout();

        // Join the initial replication nodes with '&' (may be null).
        String replicationNodes = null;
        if (initReplicationNodes != null) {
            StringBuilder buff = new StringBuilder();
            for (int i = 0, size = initReplicationNodes.size(); i < size; i++) {
                if (i > 0)
                    buff.append('&');
                buff.append(initReplicationNodes.get(i));
            }
            replicationNodes = buff.toString();
        }

        // Collect each replica's server list into a comma-separated string.
        n = sessions.length;
        servers = new String[n];
        StringBuilder buff = new StringBuilder();
        for (int i = 0; i < n; i++) {
            if (i > 0)
                buff.append(',');
            servers[i] = sessions[i].getConnectionInfo().getServers();
            buff.append(servers[i]);
        }
        serversStr = buff.toString();

        String replicationNamePrefix = getLocalHostAndPort() + "_" + getId() + "_";
        if (replicationNodes != null) {
            replicationNamePrefix = replicationNodes + "@" + replicationNamePrefix;
        }
        this.replicationNamePrefix = replicationNamePrefix;
        // Set the default consistency level.
        setConsistencyLevel(ConsistencyLevel.ALL);
    }

    public void setMaxTries(int maxTries) {
        this.maxTries = maxTries;
    }

    public void setRpcTimeout(long rpcTimeoutMillis) {
        this.rpcTimeoutMillis = rpcTimeoutMillis;
    }

    public ConsistencyLevel getConsistencyLevel() {
        return consistencyLevel;
    }

    public void setConsistencyLevel(ConsistencyLevel consistencyLevel) {
        this.consistencyLevel = consistencyLevel;
        // ALL means write-all / read-one; anything else uses majority quorums.
        if (consistencyLevel == ConsistencyLevel.ALL) {
            w = n;
            r = 1;
        } else {
            w = r = n / 2 + 1;
        }
    }

    // Replication name format: hostName:port_sessionId_time_counter,consistencyLevel,serversStr
    String createReplicationName() {
        StringBuilder n = new StringBuilder(replicationNamePrefix);
        n.append(System.nanoTime() / 1000).append("_").append(counter.getAndIncrement());
        n.append(',').append(consistencyLevel.code).append(',').append(serversStr);
        String replicationName = n.toString();
        // Propagate the new name to every replica session.
        for (Session s : sessions) {
            s.setReplicationName(replicationName);
        }
        return replicationName;
    }

    @Override
    public SQLCommand createSQLCommand(String sql, int fetchSize) {
        ReplicaSQLCommand[] commands = new ReplicaSQLCommand[n];
        for (int i = 0; i < n; i++)
            commands[i] = sessions[i].createReplicaSQLCommand(sql, fetchSize);
        return new ReplicationSQLCommand(this, commands);
    }

    @Override
    public SQLCommand prepareSQLCommand(String sql, int fetchSize) {
        ReplicaSQLCommand[] commands = new ReplicaSQLCommand[n];
        for (int i = 0; i < n; i++)
            commands[i] = sessions[i].prepareReplicaSQLCommand(sql, fetchSize);
        return new ReplicationSQLCommand(this, commands);
    }

    @Override
    public StorageCommand createStorageCommand() {
        ReplicaStorageCommand[] commands = new ReplicaStorageCommand[n];
        for (int i = 0; i < n; i++)
            commands[i] = sessions[i].createReplicaStorageCommand();
        return new ReplicationStorageCommand(this, commands);
    }

    // The remaining overrides broadcast the call to every replica session.

    @Override
    public void addSavepoint(String name) {
        for (int i = 0; i < n; i++)
            sessions[i].addSavepoint(name);
    }

    @Override
    public void rollbackToSavepoint(String name) {
        for (int i = 0; i < n; i++)
            sessions[i].rollbackToSavepoint(name);
    }

    @Override
    public void commitTransaction(String localTransactionName) {
        for (int i = 0; i < n; i++)
            sessions[i].commitTransaction(localTransactionName);
    }

    @Override
    public void rollbackTransaction() {
        for (int i = 0; i < n; i++)
            sessions[i].rollbackTransaction();
    }

    @Override
    public void setAutoCommit(boolean autoCommit) {
        for (int i = 0; i < n; i++)
            sessions[i].setAutoCommit(autoCommit);
        super.setAutoCommit(autoCommit);
    }

    @Override
    public void cancel() {
        for (int i = 0; i < n; i++)
            sessions[i].cancel();
    }

    @Override
    public void close() {
        for (int i = 0; i < n; i++)
            sessions[i].close();
    }

    @Override
    public void setParentTransaction(Transaction transaction) {
        for (int i = 0; i < n; i++)
            sessions[i].setParentTransaction(transaction);
    }

    @Override
    public void rollback() {
        for (int i = 0; i < n; i++)
            sessions[i].rollback();
    }

    @Override
    public void setRoot(boolean isRoot) {
        for (int i = 0; i < n; i++)
            sessions[i].setRoot(isRoot);
    }

    @Override
    public void commit(String allLocalTransactionNames) {
        for (int i = 0; i < n; i++)
            sessions[i].commit(allLocalTransactionNames);
    }
}
|
enfoTek/tomato.linksys.e2000.nvram-mod | tools-src/gnu/gcc/libjava/java/awt/event/ItemEvent.java | <gh_stars>10-100
/* Copyright (C) 2000 Free Software Foundation
This file is part of libjava.
This software is copyrighted work licensed under the terms of the
Libjava License. Please consult the file "LIBJAVA_LICENSE" for
details. */
package java.awt.event;
import java.awt.*;
/**
* @author <NAME> <<EMAIL>>
* @date April 8, 2000
*/
/* Status: Believed complete and correct to JDK 1.2. */
public class ItemEvent extends AWTEvent
{
  /** State-change constant: the item was deselected. */
  public static final int DESELECTED = 2;
  /** First id in the range of item event ids. */
  public static final int ITEM_FIRST = 701;
  /** Last id in the range of item event ids. */
  public static final int ITEM_LAST = 701;
  /** Event id indicating an item's selection state changed. */
  public static final int ITEM_STATE_CHANGED = 701;
  /** State-change constant: the item was selected. */
  public static final int SELECTED = 1;

  /**
   * Creates a new ItemEvent.
   *
   * @param source the ItemSelectable that originated the event
   * @param id the event id (normally ITEM_STATE_CHANGED)
   * @param item the item whose state changed
   * @param sc the state change, SELECTED or DESELECTED
   */
  public ItemEvent (ItemSelectable source, int id, Object item, int sc)
  {
    super (source, id);
    this.item = item;
    this.stateChange = sc;
  }

  /** Returns the item whose selection state changed. */
  public Object getItem ()
  {
    return item;
  }

  /** Returns the event source cast to ItemSelectable. */
  public ItemSelectable getItemSelectable ()
  {
    return (ItemSelectable) source;
  }

  /** Returns SELECTED or DESELECTED. */
  public int getStateChange ()
  {
    return stateChange;
  }

  /** Returns a debugging string describing the event id, item and state change. */
  public String paramString ()
  {
    String r;
    switch (id)
      {
      case ITEM_STATE_CHANGED:
	r = "ITEM_STATE_CHANGED";
	break;
      default:
	r = "unknown id";
	break;
      }
    r += ",item=" + item + ",stateChange=";
    switch (stateChange)
      {
      case SELECTED:
	r += "SELECTED";
	break;
      case DESELECTED:
	r += "DESELECTED";
	break;
      default:
	r += "unknown";
	break;
      }
    return r;
  }

  // The affected item.
  private Object item;
  // SELECTED or DESELECTED.
  private int stateChange;
}
|
uninth/UNItools | src/GaIA/pkgs/ifstatus/ifstatus/Interface.cc | /** \file Interface.cc
* \author <NAME> <<EMAIL>>
* \date 10-14-04
* \brief Interface class implementation
*
* Class implementation
*/
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
#include "Interface.h"
#include "Config.h"
// Zero-initializes all traffic counters. NOTE(review): assumes
// m_ullReceived/m_ullSent are contiguous eTotalTypes-sized unsigned long long
// arrays declared in Interface.h — confirm against the header.
InterfaceData::InterfaceData()
{
	memset((unsigned long long *)&m_ullReceived, 0, eTotalTypes * sizeof(unsigned long long));
	memset((unsigned long long *)&m_ullSent, 0, eTotalTypes * sizeof(unsigned long long));
}

// Copies all counters (bytes, packets, errors) from another sample.
InterfaceData & InterfaceData::operator=(InterfaceData & rInterfaceData)
{
	// Bytes
	setReceivedBytes(rInterfaceData.getReceivedBytes());
	setSentBytes(rInterfaceData.getSentBytes());
	// Packets
	setReceivedPackets(rInterfaceData.getReceivedPackets());
	setSentPackets(rInterfaceData.getSentPackets());
	// Errors
	setReceivingErrors(rInterfaceData.getReceivingErrors());
	setSendingErrors(rInterfaceData.getSendingErrors());

	return *this;
}

// Element-wise counter difference; used to turn two cumulative samples into
// a per-interval delta (see Interface::Update).
InterfaceData InterfaceData::operator-(InterfaceData & rInterfaceData)
{
	InterfaceData cData;

	cData.setSentBytes(getSentBytes() - rInterfaceData.getSentBytes());
	cData.setReceivedBytes(getReceivedBytes() - rInterfaceData.getReceivedBytes());
	cData.setSentPackets(getSentPackets() - rInterfaceData.getSentPackets());
	cData.setReceivedPackets(getReceivedPackets() - rInterfaceData.getReceivedPackets());
	cData.setSendingErrors(getSendingErrors() - rInterfaceData.getSendingErrors());
	cData.setReceivingErrors(getReceivingErrors() - rInterfaceData.getReceivingErrors());

	return cData;
}
// Construct from a std::string interface name (e.g. "eth0").
Interface::Interface(string & strInterface)
{
	Initialize(strInterface.c_str());
}

// Construct from a C-string interface name.
Interface::Interface(char * pInterface)
{
	Initialize((const char *)pInterface);
}

Interface::~Interface()
{
	m_deqHistory.clear();
}

// Shared constructor body: resets counters/state and resolves the IP address.
void Interface::Initialize(const char * pInterface)
{
	m_strInterface = pInterface;
	m_strOnlineTime = "0d 00:00:00";
	m_ulTopBytesSecond = m_ulTopPacketsSecond = 0;
	m_ulPeriodBytes = m_ulPeriodPackets = 0;
	m_bFirst = true;
	m_bOnlineTime = false;
	m_bDrawn = false;
	ResolveIPAddress();
	m_deqHistory.clear();
	setInactive();
}
// Takes a new traffic sample: records the delta since the previous sample in
// the history deque and updates the per-period and top byte/packet rates.
void Interface::Update(void)
{
	InterfaceData cDiference = getData() - getPreviousData();
	setPreviousData(getData());

	// Cap the history at ~10 screens worth of samples; when exceeded, drop
	// "GraphPeriod" entries from the front.
	if(m_deqHistory.size() > ((unsigned)Window::ScreenColumns() * 10))
	{
		Config * pConfig = Config::Instance();
		int iPeriod = atoi((pConfig->getValue("GraphPeriod")).c_str());
		int iErase;
		for(iErase = 0; iErase < iPeriod; iErase++)
			m_deqHistory.erase(m_deqHistory.begin());
	}

	m_deqHistory.push_back(cDiference);

	// Combined (sent + received) totals for this period.
	setPeriodBytes(cDiference.getSentBytes() + cDiference.getReceivedBytes());
	setPeriodPackets(cDiference.getSentPackets() + cDiference.getReceivedPackets());

	// Track all-time maxima.
	if(getPeriodBytes() > getTopBytesSecond())
		setTopBytesSecond(getPeriodBytes());

	if(getPeriodPackets() > getTopPacketsSecond())
		setTopPacketsSecond(getPeriodPackets());

	UpdateOnlineTime();
	setDrawn(false);
}
void Interface::UpdateOnlineTime(void)
{
struct stat sStat;
time_t tCurrentTime = time(NULL);
time_t tOnlineTime;
string strFile = "/var/run/" + m_strInterface + ".pid";
if(!stat(strFile.c_str(), &sStat))
{
int iDays, iHours, iMinutes, iSeconds;
char cTime[12] = { 0 };
m_bOnlineTime = true;
tOnlineTime = tCurrentTime - sStat.st_mtime;
iSeconds = tOnlineTime % 60;
iMinutes = (tOnlineTime / 60) % 60;
iHours = (tOnlineTime / 3600) % 24;
iDays = (tOnlineTime / (3600 * 24)) % 365;
sprintf(cTime, "%dd %.2d:%.2d:%.2d", iDays, iHours, iMinutes, iSeconds);
m_strOnlineTime = cTime;
}
else
{
m_bOnlineTime = false;
}
}
void Interface::ResolveIPAddress(void)
{
struct ifreq sIFReq;
struct sockaddr_in * pSocketAddress;
int iSocket;
if((iSocket = socket(AF_INET, SOCK_DGRAM, 0)) < 0)
{
m_strIPAddress = "Unknown";
return;
}
strcpy(sIFReq.ifr_name, m_strInterface.c_str());
if(ioctl(iSocket, SIOCGIFADDR, &sIFReq) < 0);
pSocketAddress = (struct sockaddr_in *)&sIFReq.ifr_addr;
if(pSocketAddress->sin_addr.s_addr)
m_strIPAddress = inet_ntoa(pSocketAddress->sin_addr);
else
m_strIPAddress = "Unknown";
}
|
bpbpublications/Building-Server-side-and-Microservices-with-Go | Chapter 01/mutex/mutex.go | package main
import (
"fmt"
"sync"
)
// main spawns ten goroutines that each insert one key into a shared map,
// guarding every write with a mutex, then prints the final map size (10).
func main() {
	const workers = 10

	var (
		wg      sync.WaitGroup
		mu      sync.Mutex
		results = make(map[int]int)
	)

	wg.Add(workers)
	for i := 0; i < workers; i++ {
		go func(key int) {
			defer wg.Done()
			mu.Lock()
			defer mu.Unlock()
			results[key] = key
		}(i)
	}

	wg.Wait()
	fmt.Println(len(results))
}
|
MarekBykowski/tf-a-tests | include/lib/libc/assert.h | /*
* Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef ASSERT_H
#define ASSERT_H

#include <cdefs.h>
#include <common/debug.h>

/*
 * When assertions are enabled, a failing condition reports the file, line and
 * stringified expression via __assert(); otherwise assert() compiles away to
 * a no-op expression.
 */
#if ENABLE_ASSERTIONS
#define assert(e) ((e) ? (void)0 : __assert(__FILE__, __LINE__, #e))
#else
#define assert(e) ((void)0)
#endif /* ENABLE_ASSERTIONS */

/* Reports an assertion failure; does not return (__dead2). */
__dead2 void __assert(const char *file, unsigned int line,
		      const char *assertion);

#endif /* ASSERT_H */
|
heymajor/exercism | python/phone-number/phone_number.py | <gh_stars>0
import re
class PhoneNumber:
    """A NANP (North American Numbering Plan) phone number.

    Accepts a raw string such as ``"+1 (223) 456-7890"``, strips punctuation
    and the optional leading country code, validates the result, and exposes:

    - ``number``    — the cleaned 10-digit number
    - ``area_code`` — digits 1-3
    - ``exchange``  — digits 4-6
    - ``last``      — digits 7-10

    Raises ``ValueError`` for letters, forbidden punctuation, a wrong digit
    count, or area/exchange codes starting with 0 or 1.

    Note: the original stored every field as a *class* attribute, so building
    a second instance silently clobbered the first; fields are now stored
    per instance.
    """

    def __init__(self, number):
        cleaned = PhoneNumber.clean(number)
        self.number = PhoneNumber.error(cleaned)
        self.area_code = self.number[0:3]
        self.exchange = self.number[3:6]
        self.last = self.number[6:]

    def pretty(self):
        """Return the number formatted as ``(NXX)-NXX-XXXX``."""
        return f"({self.area_code})-{self.exchange}-{self.last}"

    @staticmethod
    def clean(x):
        """Reject letters/forbidden punctuation, then strip all non-digits."""
        if re.search('[a-zA-Z]', x): raise ValueError("letters not permitted")
        if re.search(r'[@:!]', x): raise ValueError("punctuations not permitted")
        return re.sub(r"[^0-9]", "", x)

    @staticmethod
    def error(x):
        """Validate a digit string and return the canonical 10-digit number."""
        if len(x) < 10: raise ValueError("incorrect number of digits")
        if len(x) > 11: raise ValueError("more than 11 digits")
        if len(x) == 11:
            if x[0] != "1": raise ValueError("11 digits must start with 1")
            if x[1] == "0": raise ValueError("area code cannot start with zero")
            if x[1] == "1": raise ValueError("area code cannot start with one")
            if x[4] == "0": raise ValueError("exchange code cannot start with zero")
            if x[4] == "1": raise ValueError("exchange code cannot start with one")
            # Drop the country code.
            return x[1:]
        if x[3] == "0": raise ValueError("exchange code cannot start with zero")
        if x[3] == "1": raise ValueError("exchange code cannot start with one")
        if x[0] == "0": raise ValueError("area code cannot start with zero")
        if x[0] == "1": raise ValueError("area code cannot start with one")
        return x
def main():
    """Tiny manual smoke test: build a number and print its pretty form."""
    phone = PhoneNumber("2234567890")
    print(phone.pretty())


if __name__ == '__main__':
    main()
|
disney007/linker | processor/src/test/java/com/linker/processor/TestUtils.java | <filename>processor/src/test/java/com/linker/processor/TestUtils.java
package com.linker.processor;
import com.linker.common.*;
import com.linker.common.codec.Codec;
import com.linker.common.messagedelivery.KafkaExpressDelivery;
import com.linker.common.messages.UserConnected;
import com.linker.common.messages.UserDisconnected;
import org.springframework.stereotype.Component;
import java.util.Comparator;
import java.util.List;
import java.util.stream.IntStream;
import static org.junit.Assert.assertEquals;
@Component
public class TestUtils {

    // NOTE(review): these are static fields populated from a Spring-managed
    // constructor, so the whole test suite shares the instances of the last
    // constructed TestUtils bean.
    static KafkaExpressDelivery kafkaExpressDelivery;
    static Codec codec;
    static ProcessorUtils processorUtils;

    public TestUtils(KafkaExpressDelivery kafkaExpressDelivery, Codec codec, ProcessorUtils processorUtils) {
        TestUtils.kafkaExpressDelivery = kafkaExpressDelivery;
        TestUtils.codec = codec;
        TestUtils.processorUtils = processorUtils;
    }

    /** Sorts messages in place by (to, from) so lists can be compared pairwise. */
    public static List<Message> sortMessages(List<Message> messages) {
        messages.sort(Comparator.comparing(Message::getTo).thenComparing(Message::getFrom));
        return messages;
    }

    /** Asserts two message lists are equal, ignoring order. */
    public static void messagesEqual(List<Message> expectedMsgs, List<Message> actualMsgs) {
        sortMessages(expectedMsgs);
        sortMessages(actualMsgs);
        assertEquals(expectedMsgs.size(), actualMsgs.size());
        IntStream.range(0, expectedMsgs.size())
                .forEach(index -> TestUtils.messageEquals(expectedMsgs.get(index), actualMsgs.get(index)));
    }

    /**
     * Asserts field-by-field equality of two messages. The actual payload is
     * first converted to the expected payload's class so content comparison
     * works across serialized forms.
     */
    public static void messageEquals(Message expectedMsg, Message actualMsg) {
        Object actualData = Utils.convert(actualMsg.getContent().getData(), expectedMsg.getContent().getData().getClass());
        actualMsg.getContent().setData(actualData);
        assertEquals(expectedMsg.getVersion(), actualMsg.getVersion());
        assertEquals(expectedMsg.getContent(), actualMsg.getContent());
        assertEquals(expectedMsg.getFrom(), actualMsg.getFrom());
        assertEquals(expectedMsg.getTo(), actualMsg.getTo());
        assertEquals(expectedMsg.getMeta(), actualMsg.getMeta());
        assertEquals(expectedMsg.getState(), actualMsg.getState());
    }

    /**
     * Asserts equality of two messages whose payloads are themselves Messages:
     * compares the wrapped messages, then the envelopes with payloads blanked.
     */
    public static void internalMessageEquals(Message expectedMsg, Message actualMsg) {
        messageEquals(
                Utils.convert(expectedMsg.getContent().getData(), Message.class),
                Utils.convert(actualMsg.getContent().getData(), Message.class)
        );

        Message clonedExpectedMsg = expectedMsg.clone();
        clonedExpectedMsg.getContent().setData("");
        Message clonedActualMsg = actualMsg.clone();
        clonedActualMsg.getContent().setData("");
        messageEquals(clonedExpectedMsg, clonedActualMsg);
    }

    /** Simulates a user login by injecting a USER_CONNECTED message via Kafka. */
    public static TestUser loginUser(String userId, Address address) {
        Message message = Message.builder()
                .from(Keywords.SYSTEM)
                .to(Keywords.PROCESSOR)
                .content(
                        MessageUtils.createMessageContent(MessageType.USER_CONNECTED, new UserConnected(userId),
                                MessageFeature.RELIABLE)
                )
                .meta(new MessageMeta(address))
                .build();
        kafkaExpressDelivery.onMessageArrived(codec.serialize(message));
        return new TestUser(userId, address, message.getId(), null);
    }

    /** Login with a default address (domain-01/connector-01, socket 10). */
    public static TestUser loginUser(String userId) {
        return loginUser(userId, new Address("domain-01", "connector-01", 10L));
    }

    /** Login with a specific socket id on the default domain/connector. */
    public static TestUser loginUser(String userId, Long socketId) {
        return loginUser(userId, new Address("domain-01", "connector-01", socketId));
    }

    /** Login using the processor's domain-resolved form of the user id. */
    public static TestUser loginDomainUser(String userId, Long socketId) {
        return loginUser(processorUtils.resolveDomainUserId(userId), socketId);
    }

    /** Simulates a user logout by injecting a USER_DISCONNECTED message via Kafka. */
    public static void logoutUser(TestUser user) {
        Message message = Message.builder()
                .from(Keywords.SYSTEM)
                .to(Keywords.PROCESSOR)
                .content(
                        MessageUtils.createMessageContent(MessageType.USER_DISCONNECTED, new UserDisconnected(user.getUserId()),
                                MessageFeature.RELIABLE)
                )
                .meta(new MessageMeta(user.getAddress()))
                .build();
        user.setDisconnectedMessageId(message.getId());
        kafkaExpressDelivery.onMessageArrived(codec.serialize(message));
    }
}
|
kodexa-ai/kodexa | kodexa/model/model.py | <reponame>kodexa-ai/kodexa<gh_stars>1-10
"""
The core model provides definitions for all the base objects in the Kodexa Content Model
"""
import abc
import dataclasses
import inspect
import json
import os
import re
import uuid
from enum import Enum
from typing import Any, List, Optional
import msgpack
from addict import Dict
from kodexa.mixins import registry
from kodexa.model.objects import ModelContentMetadata, ContentObject, DocumentTransition, Store, DocumentFamily
class Ref:
    """Parses a resource reference of the form ``org/slug[:version[/resource]]``.

    Exposes ``org_slug``, ``slug``, ``version`` (or None), ``resource``
    (or None) and ``object_ref`` (``org/slug`` with the version appended
    when present).
    """

    def __init__(self, ref: str):
        self.ref: str = ref
        self.version: Optional[str] = None
        self.resource: Optional[str] = None

        head = ref
        if ':' in ref:
            # Split off the version (and, when present, a trailing resource).
            head, tail = ref.split(":")
            if '/' in tail:
                tail, self.resource = tail.split('/')
            self.version = tail

        self.org_slug, self.slug = head.split("/")

        if self.version:
            self.object_ref = f"{self.org_slug}/{self.slug}:{self.version}"
        else:
            self.object_ref = f"{self.org_slug}/{self.slug}"
class DocumentMetadata(Dict):
    """A flexible dict based approach to capturing metadata for the document.

    Subclasses ``addict.Dict`` so metadata can be accessed and assigned with
    attribute syntax as well as item syntax.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
class Tag(Dict):
    """A tag represents the metadata for a label that is applies as a feature on a content node.

    When ``start``/``end`` are None the label applies to the whole node;
    otherwise they delimit the labelled span within the node's content.
    """

    def __init__(self, start: Optional[int] = None, end: Optional[int] = None, value: Optional[str] = None,
                 uuid: Optional[str] = None, data: Any = None, *args, confidence: Optional[float] = None,
                 index: Optional[int] = None,
                 **kwargs):
        super().__init__(*args, **kwargs)
        self.start: Optional[int] = start
        """The start position (zero indexed) of the content within the node, if None then label is applied to the whole node"""
        self.end: Optional[int] = end
        """The end position (zero indexed) of the content within the node, if None then label is applied to the whole node"""
        self.value: Optional[str] = value
        """A string representing the value that was labelled in the node"""
        self.data: Optional[Any] = data
        """Any data object (JSON serializable) that you wish to associate with the label"""
        self.uuid: Optional[str] = uuid
        """The UUID for this tag instance, this allows tags that are on different content nodes to be related through the same UUID"""
        self.confidence: Optional[float] = confidence
        """The confidence of the tag in a range of 0-1"""
        self.index: Optional[int] = index
        """The tag index, this is used to allow us to order tags, and understand the ordering of parent child tag relationships"""
class FindDirection(Enum):
    """Direction to walk the tree when searching relative to a node."""
    CHILDREN = 1
    PARENT = 2
class Traverse(Enum):
    """Which related nodes to visit during a traversal."""
    SIBLING = 1
    CHILDREN = 2
    PARENT = 3
    ALL = 4
class ContentNode(object):
"""A Content Node identifies a section of the document containing logical
grouping of information.
The node will have content and can include any number of features.
You should always create a node using the Document's create_node method to
ensure that the correct mixins are applied.
>>> new_page = document.create_node(node_type='page')
<kodexa.model.model.ContentNode object at 0x7f80605e53c8>
>>> current_content_node.add_child(new_page)
>>> new_page = document.create_node(node_type='page', content='This is page 1')
<kodexa.model.model.ContentNode object at 0x7f80605e53c8>
>>> current_content_node.add_child(new_page)
"""
    def __init__(self, document, node_type: str, content: Optional[str] = None,
                 content_parts: Optional[List[Any]] = None, parent=None, index: Optional[int] = None,
                 virtual: bool = False):
        """Create a content node attached to a document.

        Prefer Document.create_node over constructing directly so mixins are applied.

        Args:
            document: The owning Document (node state lives in its persistence layer).
            node_type (str): The node type (ie. line, page, cell etc).
            content (Optional[str]): Initial text content; stored as the first content part.
            content_parts (Optional[List[Any]]): Mixed list of strings and child indices.
            parent: Optional parent ContentNode; only its uuid is kept.
            index (Optional[int]): Position of this node among its siblings.
            virtual (bool): True when the node does not actually exist in the document.
        """
        self.node_type: str = node_type
        """The node type (ie. line, page, cell etc)"""
        self.document: Document = document
        """The document that the node belongs to"""
        self._content_parts: Optional[List[Any]] = content_parts
        """The children of the content node"""
        self.index: Optional[int] = index
        """The index of the content node"""
        self.uuid: Optional[int] = None
        """The ID of the content node"""
        self.virtual: bool = virtual
        """Is the node virtual (ie. it doesn't actually exist in the document)"""
        self._parent_uuid = parent.uuid if parent else None
        # Seed the content parts only when nothing is stored for this node yet.
        if content is not None and len(self.get_content_parts()) == 0:
            self.set_content_parts([content])
def get_content_parts(self):
return self.document.get_persistence().get_content_parts(self)
def set_content_parts(self, content_parts):
self.document.get_persistence().update_content_parts(self, content_parts)
@property
def content(self):
if len(self.get_content_parts()) == 0:
return None
s = ""
for part in self.get_content_parts():
if isinstance(part, str):
if s != "":
s += " "
s += part
return s
@content.setter
def content(self, new_content):
if len(self.get_content_parts()) == 0:
self.set_content_parts([new_content])
else:
# We need to remove all the strings and add this one
# back at the front
parts = self.get_content_parts()
filtered_parts = list(filter(lambda part: isinstance(part, int), parts))
if new_content is not None and new_content != "":
filtered_parts.insert(0, new_content)
self.set_content_parts(filtered_parts)
def __eq__(self, other):
return other is not None and self.uuid == other.uuid and (self.uuid is not None and other.uuid is not None)
def get_parent(self):
return self.document.get_persistence().get_node(self._parent_uuid)
def __str__(self):
return f"ContentNode {self.uuid} [node_type:{self.node_type}] ({len(self.get_features())} features, {len(self.get_children())} children) [" + str(
self.content) + "]"
def to_json(self):
"""Create a JSON string representation of this ContentNode.
Args:
Returns:
str: The JSON formatted string representation of this ContentNode.
>>> node.to_json()
"""
return json.dumps(self.to_dict())
def to_dict(self):
"""Create a dictionary representing this ContentNode's structure and content.
Args:
Returns:
dict: The properties of this ContentNode and all of its children structured as a dictionary.
>>> node.to_dict()
"""
new_dict = {'node_type': self.node_type, 'content': self.content, 'content_parts': self.get_content_parts(),
'features': [],
'index': self.index, 'children': [], 'uuid': self.uuid}
for feature in self.get_features():
new_dict['features'].append(feature.to_dict())
for child in self.get_children():
new_dict['children'].append(child.to_dict())
return new_dict
@staticmethod
def from_dict(document, content_node_dict: Dict, parent=None):
"""Build a new ContentNode from a dictionary represention.
Args:
document (Document): The Kodexa document from which the new ContentNode will be created (not added).
content_node_dict (Dict): The dictionary-structured representation of a ContentNode. This value will be unpacked into a ContentNode.
parent (Optional[ContentNode]): Optionally the parent content node
Returns:
ContentNode: A ContentNode containing the unpacked values from the content_node_dict parameter.
>>> ContentNode.from_dict(document, content_node_dict)
"""
node_type = content_node_dict['type'] if document.version == Document.PREVIOUS_VERSION else content_node_dict[
'node_type']
new_content_node = document.create_node(node_type=node_type, content=content_node_dict[
'content'] if 'content' in content_node_dict else None, index=content_node_dict['index'], parent=parent)
if 'content_parts' in content_node_dict and len(content_node_dict['content_parts']) > 0:
new_content_node.set_content_parts(content_node_dict['content_parts'])
for dict_feature in content_node_dict['features']:
feature_type = dict_feature['name'].split(':')[0]
if feature_type == 'tag':
new_content_node.add_feature(feature_type,
dict_feature['name'].split(':')[1],
dict_feature['value'], dict_feature['single'], True)
else:
new_content_node.add_feature(feature_type,
dict_feature['name'].split(':')[1],
dict_feature['value'], dict_feature['single'], True)
for dict_child in content_node_dict['children']:
ContentNode.from_dict(document, dict_child, new_content_node)
return new_content_node
def add_child_content(self, node_type: str, content: str, index: Optional[int] = None) -> 'ContentNode':
"""Convenience method to allow you to quick add a child node with a type and content
Args:
node_type: the node type
content: the content
index: the index (optional) (Default value = None)
Returns:
the new ContentNode
"""
new_node = self.document.create_node(node_type=node_type, parent=self, content=content)
self.add_child(new_node, index)
return new_node
def add_child(self, child, index: Optional[int] = None):
"""Add a ContentNode as a child of this ContentNode
Args:
child (ContentNode): The node that will be added as a child of this node
index (Optional[int]): The index at which this child node should be added; defaults to None. If None, index is set as the count of child node elements.
Returns:
>>> new_page = document.create_node(node_type='page')
<kodexa.model.model.ContentNode object at 0x7f80605e53c8>
>>> current_content_node.add_child(new_page)
"""
if index is None:
if len(self.get_children()) > 0:
child.index = self.get_children()[-1].index + 1
else:
child.index = 0
else:
child.index = index
self.document.get_persistence().add_content_node(child, self)
def remove_child(self, content_node):
try:
child_idx = self.get_children().index(content_node)
child = self.get_children()[child_idx]
self.document.get_persistence().remove_content_node(child)
except ValueError:
pass
def get_children(self):
"""Returns a list of the children of this node.
Returns:
list[ContentNode]: The list of child nodes for this ContentNode.
>>> node.get_children()
"""
return self.document.get_persistence().get_children(self)
    def set_feature(self, feature_type, name, value):
        """Sets a feature for this ContentNode, replacing the value if a feature by this type and name already exists.

        Args:
            feature_type (str): The type of feature to be added to the node.
            name (str): The name of the feature.
            value (Any): The value of the feature.

        Returns:
            ContentFeature: The feature that was added to this ContentNode.

        >>> new_page = document.create_node(node_type='page')
        <kodexa.model.model.ContentNode object at 0x7f80605e53c8>
        >>> new_page.set_feature('pagination','pageNum',1)
        """
        # Drop any existing feature of this type/name first so add_feature creates
        # a fresh single-valued feature rather than appending to the old one.
        self.remove_feature(feature_type, name)
        return self.add_feature(feature_type, name, value)
    def add_feature(self, feature_type, name, value, single=True, serialized=False):
        """
        Add a new feature to this ContentNode.

        Note: if a feature for this feature_type/name already exists, the new value will be added to the existing feature;
        therefore the feature value might become a list.

        Args:
            feature_type (str): The type of feature to be added to the node.
            name (str): The name of the feature.
            value (Any): The value of the feature.
            single (boolean): Indicates that the value is singular, rather than a collection (ex: str vs list); defaults to True.
            serialized (boolean): Indicates that the value is/is not already serialized; defaults to False.

        Returns:
            ContentFeature: The feature that was added to this ContentNode.

        >>> new_page = document.create_node(node_type='page')
        <kodexa.model.model.ContentNode object at 0x7f80605e53c8>
        >>> new_page.add_feature('pagination','pageNum',1)
        """
        if self.has_feature(feature_type, name):
            feature = self.get_feature(feature_type, name)
            feature.single = False  # always setting to false if we already have a feature of this type/name
            feature.value.append(value)
            # Persist the merged feature: the layer has no update, so remove then re-add.
            self.document.get_persistence().remove_feature(self, feature_type, name)
            self.document.get_persistence().add_feature(self, feature)
            return feature
        else:
            # Make sure that we treat the value as list all the time
            new_feature = ContentFeature(feature_type, name,
                                         [value] if single and not serialized else value, single=single)
            self.document.get_persistence().add_feature(self, new_feature)
            return new_feature
def delete_children(self, nodes: Optional[List] = None,
exclude_nodes: Optional[List] = None):
"""Delete the children of this node, you can either supply a list of the nodes to delete
or the nodes to exclude from the delete, if neither are supplied then we delete all the children.
Note there is precedence in place, if you have provided a list of nodes to delete then the nodes
to exclude is ignored.
Args:
nodes: Optional[List[ContentNode]] a list of content nodes that are children to delete
exclude_nodes: Optional[List[ContentNode]] a list of content node that are children not to delete
nodes: Optional[List]: (Default value = None)
exclude_nodes: Optional[List]: (Default value = None)
"""
children_to_delete = []
for child_node in self.get_children():
if nodes is not None:
for node_to_delete in nodes:
if node_to_delete.uuid == child_node.uuid:
children_to_delete.append(child_node)
elif exclude_nodes is not None:
if len(exclude_nodes) == 0:
children_to_delete.append(child_node)
else:
for nodes_to_exclude in exclude_nodes:
if nodes_to_exclude.uuid != child_node.uuid:
children_to_delete.append(child_node)
else:
children_to_delete.append(child_node)
for child_to_delete in children_to_delete:
if child_to_delete in self.get_children():
self.document.get_persistence().remove_content_node(child_to_delete)
def get_feature(self, feature_type, name):
"""Gets the value for the given feature.
Args:
feature_type (str): The type of the feature.
name (str): The name of the feature.
Returns:
ContentFeature or None: The feature with the specified type & name. If no feature is found, None is returned.
Note that if there are more than one instance of the feature you will only get the first one
>>> new_page.get_feature('pagination','pageNum')
1
"""
hits = [i for i in self.get_features() if i.feature_type == feature_type and i.name == name]
if len(hits) > 0:
return hits[0]
else:
return None
def get_features_of_type(self, feature_type):
"""Get all features of a specific type.
Args:
feature_type (str): The type of the feature.
Returns:
list[ContentFeature]: A list of feature with the specified type. If no features are found, an empty list is returned.
>>> new_page.get_features_of_type('my_type')
[]
"""
return [i for i in self.get_features() if i.feature_type == feature_type]
def has_feature(self, feature_type: str, name: str):
"""Determines if a feature with the given feature and name exists on this content node.
Args:
feature_type (str): The type of the feature.
name (str): The name of the feature.
Returns:
bool: True if the feature is present; else, False.
>>> new_page.has_feature('pagination','pageNum')
True
"""
return len([i for i in self.get_features() if i.feature_type == feature_type and i.name == name]) > 0
def get_features(self):
"""Get all features on this ContentNode.
Returns:
list[ContentFeature]: A list of the features on this ContentNode.
"""
return self.document.get_persistence().get_features(self)
def remove_feature(self, feature_type: str, name: str, include_children: bool = False):
"""Removes the feature with the given name and type from this node.
Args:
feature_type (str): The type of the feature.
name (str): The name of the feature.
include_children (bool): also remove the feature from nodes children
>>> new_page.remove_feature('pagination','pageNum')
"""
self.document.get_persistence().remove_feature(self, feature_type, name)
if include_children:
for child in self.get_children():
child.remove_feature(feature_type, name, include_children)
def get_feature_value(self, feature_type: str, name: str) -> Optional[Any]:
"""Get the value for a feature with the given name and type on this ContentNode.
Args:
feature_type (str): The type of the feature.
name (str): The name of the feature.
Returns:
Any or None: The value of the feature if it exists on this ContentNode otherwise, None, note this
only returns the first value (check single to determine if there are multiple)
>>> new_page.get_feature_value('pagination','pageNum')
1
"""
feature = self.get_feature(feature_type, name)
# Need to make sure we handle the idea of a single value for a feature
return None if feature is None else feature.value[0]
def get_feature_values(self, feature_type: str, name: str) -> Optional[List[Any]]:
"""Get the value for a feature with the given name and type on this ContentNode.
Args:
feature_type (str): The type of the feature.
name (str): The name of the feature.
Returns:
The list of feature values or None if there is no feature
>>> new_page.get_feature_value('pagination','pageNum')
1
"""
feature = self.get_feature(feature_type, name)
# Simply return all the feature values
return None if feature is None else feature.value
    def get_content(self):
        """Get the text content of this node (same as the ``content`` property).

        Returns:
            str: The content of this ContentNode.

        >>> new_page.get_content()
        "This is page one"
        """
        return self.content
    def get_node_type(self):
        """Get the type of this node.

        Returns:
            str: The type of this ContentNode.

        >>> new_page.get_node_type()
        "page"
        """
        return self.node_type
def select_first(self, selector, variables=None):
"""Select and return the first child of this node that match the selector value.
Args:
selector (str): The selector (ie. //*)
variables (dict, optional): A dictionary of variable name/value to use in substituion; defaults to None. Dictionary keys should match a variable specified in the selector.
Returns:
Optional[ContentNode]: The first matching node or none
>>> document.get_root().select_first('.')
ContentNode
>>> document.get_root().select_first('//*[hasTag($tagName)]', {"tagName": "div"})
ContentNode
"""
result = self.select(selector, variables)
return result[0] if len(result) > 0 else None
def select(self, selector, variables=None):
"""Select and return the child nodes of this node that match the selector value.
Args:
selector (str): The selector (ie. //*)
variables (dict, optional): A dictionary of variable name/value to use in substituion; defaults to None. Dictionary keys should match a variable specified in the selector.
Returns:
list[ContentNode]: A list of the matching content nodes. If no matches are found, the list will be empty.
>>> document.get_root().select('.')
[ContentNode]
>>> document.get_root().select('//*[hasTag($tagName)]', {"tagName": "div"})
[ContentNode]
"""
if variables is None:
variables = {}
from kodexa.selectors import parse
parsed_selector = parse(selector)
return parsed_selector.resolve(self, variables)
    def get_all_content(self, separator=" ", strip=True):
        """Get this node's content, concatenated with all of its children's content.

        Content parts are walked in order; an int part is a placeholder for the
        child with that index, whose content is inlined at that position.

        Args:
            separator (str, optional): The separator to use when joining content; defaults to " ".
            strip (boolean, optional): Strip the final result.

        Returns:
            str: The complete content for this node concatenated with the content of all child nodes.

        >>> document.content_node.get_all_content()
        "This string is made up of multiple nodes"
        """
        s = ""
        children = self.get_content_parts()
        for part in children:
            if isinstance(part, str):
                # Only emit the separator once some content has accumulated.
                if s != "":
                    s += separator
                s += part
            if isinstance(part, int):
                if s != "":
                    s += separator
                # Inline the child whose index matches this placeholder.
                s += [child.get_all_content(separator, strip=strip)
                      for child in self.get_children() if child.index == part][0]

        # Children whose index never appeared in the content parts are appended at the end.
        for child in self.get_children():
            if child.index not in self.get_content_parts():
                if s != "":
                    s += separator
                s += child.get_all_content(separator, strip=strip)
        return s.strip() if strip else s
    def adopt_children(self, nodes_to_adopt, replace=False):
        """Take a list of content nodes and adopt them under this node, re-parenting them.

        Args:
            nodes_to_adopt (List[ContentNode]): Nodes to be placed under this node; their
                position in the list becomes their child index.
            replace (bool): If True, current children that are not in the adopted list
                are removed; defaults to False.

        >>> # select all nodes of type 'line', then the root node 'adopts' them
        >>> # and replaces all it's existing children with these 'line' nodes.
        >>> document.get_root().adopt_children(document.select('//line'), replace=True)
        """
        child_idx_base = 0

        # We need to copy this since we might well mutate
        # it as we adopt
        children = nodes_to_adopt.copy()
        # Re-index existing children: adopted ones take their list position,
        # others keep a running index.
        for existing_child in self.get_children():
            if existing_child not in children:
                self.add_child(existing_child, child_idx_base)
            else:
                existing_child.index = children.index(existing_child)
            child_idx_base += 1

        # Copy to avoid mutation
        for new_child in children.copy():
            if new_child not in self.get_children():
                self.add_child(new_child, children.index(new_child))
            child_idx_base += 1

        if replace:
            # Copy to avoid mutation
            for child in self.get_children().copy():
                if child not in children:
                    self.remove_child(child)
    def remove_tag(self, tag_name):
        """Remove every occurrence of the named tag from this content node.

        Args:
            tag_name (str): The name of the tag that should be removed.

        >>> document.get_root().remove_tag('foo')
        """
        self.remove_feature('tag', tag_name)
    def set_statistics(self, statistics):
        """Set the spatial statistics for this node.

        Args:
            statistics: the statistics object

        >>> document.select.('//page')[0].set_statistics(NodeStatistics())
        """
        # NOTE(review): uses add_feature, so repeated calls accumulate values rather than
        # replacing them (set_bbox uses set_feature) — confirm this asymmetry is intended.
        self.add_feature("spatial", "statistics", statistics)
    def get_statistics(self):
        """Get the spatial statistics for this node.

        Returns:
            the statistics object (or None if not set)

        >>> document.select.('//page')[0].get_statistics()
        <kodexa.spatial.NodeStatistics object at 0x7f80605e53c8>
        """
        return self.get_feature_value("spatial", "statistics")
    def set_bbox(self, bbox):
        """Set the bounding box for the node, structured as [x1, y1, x2, y2].

        Any previously stored bbox is replaced.

        Args:
            bbox: the bounding box array

        >>> document.select.('//page')[0].set_bbox([10,20,50,100])
        """
        self.set_feature("spatial", "bbox", bbox)
    def get_bbox(self):
        """Get the bounding box for the node, structured as [x1, y1, x2, y2].

        Returns:
            the bounding box array (or None if not set)

        >>> document.select.('//page')[0].get_bbox()
        [10,20,50,100]
        """
        return self.get_feature_value("spatial", "bbox")
def set_bbox_from_children(self):
"""Set the bounding box for this node based on its children"""
x_min = None
x_max = None
y_min = None
y_max = None
for child in self.get_children():
child_bbox = child.get_bbox()
if child_bbox:
if not x_min or x_min > child_bbox[0]:
x_min = child_bbox[0]
if not x_max or x_max < child_bbox[2]:
x_max = child_bbox[2]
if not y_min or y_min > child_bbox[1]:
y_min = child_bbox[1]
if not y_max or y_max < child_bbox[3]:
y_max = child_bbox[3]
if x_min:
self.set_bbox([x_min, y_min, x_max, y_max])
    def set_rotate(self, rotate):
        """Set the rotation of the node.

        Args:
            rotate: the rotation of the node

        >>> document.select.('//page')[0].set_rotate(90)
        """
        # NOTE(review): uses add_feature, so repeated calls accumulate values rather than
        # replacing them (set_bbox uses set_feature) — confirm this asymmetry is intended.
        self.add_feature("spatial", "rotate", rotate)
    def get_rotate(self):
        """Get the rotation of the node.

        Returns:
            the rotation of the node (or None if not set)

        >>> document.select.('//page')[0].get_rotate()
        90
        """
        return self.get_feature_value("spatial", "rotate")
def get_x(self):
"""Get the X position of the node
:return: the X position of the node
Args:
Returns:
>>> document.select.('//page')[0].get_x()
10
"""
self_bbox = self.get_bbox()
if self_bbox:
return self_bbox[0]
else:
return None
def get_y(self):
"""Get the Y position of the node
:return: the Y position of the node
Args:
Returns:
>>> document.select.('//page')[0].get_y()
90
"""
self_bbox = self.get_bbox()
if self_bbox:
return self_bbox[1]
else:
return None
def get_width(self):
"""Get the width of the node
:return: the width of the node
Args:
Returns:
>>> document.select.('//page')[0].get_width()
70
"""
self_bbox = self.get_bbox()
if self_bbox:
return self_bbox[2] - self_bbox[0]
else:
return None
def get_height(self):
"""Get the height of the node
:return: the height of the node
Args:
Returns:
>>> document.select.('//page')[0].get_height()
40
"""
self_bbox = self.get_bbox()
if self_bbox:
return self_bbox[3] - self_bbox[1]
else:
return None
    def copy_tag(self, selector=".", existing_tag_name=None, new_tag_name=None):
        """Copy an existing tag to a new tag name on the selected content node(s).

        Both existing_tag_name and new_tag_name are required and must differ; otherwise
        no action is taken. Nodes without the existing tag are skipped.

        Args:
            selector (str): The selector identifying the source nodes (default "." - the current node).
            existing_tag_name (str): The name of the existing tag whose values will be copied.
            new_tag_name (str): The name of the new tag; must differ from existing_tag_name.

        >>> document.get_root().copy_tag('.', 'foo', 'bar')
        """
        if existing_tag_name is None or new_tag_name is None or existing_tag_name == new_tag_name:
            return  # invalid/no-op arguments: silently do nothing

        for node in self.select(selector):
            existing_tag_values = node.get_feature_values('tag', existing_tag_name)
            if existing_tag_values:
                for val in existing_tag_values:
                    # NOTE(review): confidence and index are not carried over to the
                    # copied tag — confirm this is intended.
                    tag = Tag(start=val['start'], end=val['end'], value=val['value'], uuid=val['uuid'],
                              data=val['data'])
                    node.add_feature('tag', new_tag_name, tag)
def collect_nodes_to(self, end_node):
"""Get the the sibling nodes between the current node and the end_node.
Args:
ContentNode: end_node: The node to end at
end_node:
Returns:
list[ContentNode]: A list of sibling nodes between this node and the end_node.
>>> document.content_node.get_children()[0].collect_nodes_to(end_node=document.content_node.get_children()[5])
"""
nodes = []
current_node = self
while current_node.uuid != end_node.uuid:
nodes.append(current_node)
if current_node.has_next_node():
current_node = current_node.next_node()
else:
break
return nodes
def tag_nodes_to(self, end_node, tag_to_apply, tag_uuid: str = None):
"""Tag all the nodes from this node to the end_node with the given tag name
Args:
end_node (ContentNode): The node to end with
tag_to_apply (str): The tag name that will be applied to each node
tag_uuid (str): The tag uuid used if you want to group them
>>> document.content_node.get_children()[0].tag_nodes_to(document.content_node.get_children()[5], tag_name='foo')
"""
[node.tag(tag_to_apply, tag_uuid=tag_uuid) for node in self.collect_nodes_to(end_node)]
    def tag_range(self, start_content_re, end_content_re, tag_to_apply, node_type_re='.*', use_all_content=False):
        """Tag all descendant nodes between the start and end content regular expressions.

        An empty start pattern means "from the first node"; an empty (or unmatched)
        end pattern means "to the last node". If the start pattern never matches,
        nothing is tagged. The end node itself is excluded (slice is half-open).

        Args:
            start_content_re: The regular expression matching the starting node.
            end_content_re: The regular expression matching the ending node.
            tag_to_apply: The tag name applied to the nodes in range.
            node_type_re: Regex restricting which node types are considered (default all).
            use_all_content: Match against full content including child nodes (default False).

        >>> document.content_node.tag_range(start_content_re='.*Cheese.*', end_content_re='.*Fish.*', tag_to_apply='foo')
        """
        # Could be line, word, or content-area
        all_nodes = self.select(f"//*[typeRegex('{node_type_re}')]")

        start_index_list = [n_idx for n_idx, node in enumerate(all_nodes)
                            if re.compile(start_content_re).match(node.get_all_content()
                                                                  if use_all_content else node.content)]
        end_index_list = [n_idx for n_idx, node in enumerate(all_nodes)
                          if re.compile(end_content_re).match(node.get_all_content()
                                                              if use_all_content else node.content)]

        start_index = 0 if start_content_re == '' else \
            start_index_list[0] if len(start_index_list) > 0 else None
        if start_index is not None:
            # Only consider end matches at or after the start position.
            end_index_list = [i for i in end_index_list if i >= start_index]

        end_index = len(all_nodes) if end_content_re == '' else \
            end_index_list[0] if len(end_index_list) > 0 else len(all_nodes)

        if start_index is not None:
            [node.tag(tag_to_apply) for node in all_nodes[start_index:end_index]]
    def tag(self, tag_to_apply, selector=".", content_re=None,
            use_all_content=False, node_only=None,
            fixed_position=None, data=None, separator=" ", tag_uuid: str = None, confidence=None, value=None,
            use_match=True, index=None):
        """
        This will tag (see Feature Tagging) the expression groups identified by the regular expression.

        Note that if you use the flag use_all_content then node_only will default to True if not set, else it
        will default to False

        Args:
            tag_to_apply: The name of tag that will be applied to the node
            selector: The selector to identify the source nodes to work on (default . - the current node)
            content_re: The regular expression that you wish to use to tag, note that we will create a tag for each matching group (Default value = None)
            use_all_content: Apply the regular expression to the all_content (include content from child nodes) (Default value = False)
            separator: Separator to use for use_all_content (Default value = " ")
            node_only: Ignore the matching groups and tag the whole node (Default value = None)
            fixed_position: Use a fixed position, supplied as a tuple i.e. - (4,10) tag from position 4 to 10 (default None)
            data: A dictionary of data for the given tag (Default value = None)
            tag_uuid: A UUID used to tie tags in order to demonstrate they're related and form a single concept.
                For example, if tagging the two words "Wells" and "Fargo" as an ORGANIZATION, the tag on both words should have the
                same tag_uuid in order to indicate they are both needed to form the single ORGANIZATION.  If a tag_uuid is provided, it is used
                on all tags created in this method.  This may result in multiple nodes or multiple feature values having the same tag_uuid.
                For example, if the selector provided results in more than one node being selected, each node would be tagged with the same tag_uuid.
                The same holds true if a content_re value is provided, node_only is set to False, and multiple matches are found for the content_re
                pattern.  In that case, each feature value would share the same UUID.
                If no tag_uuid is provided, a new uuid is generated for each tag instance.
            tag_uuid: str:  (Default value = None)
            confidence: The confidence in the tag (0-1)
            value: The value you wish to store with the tag, this allows you to provide text that isn't part of the content but represents the data you wish tagged
            use_match: If True (default) we will use match for regex matching, if False we will use search
            index: The index for the tag

        >>> document.content_node.tag('is_cheese')
        """
        if use_all_content and node_only is None:
            node_only = True
        elif node_only is None:
            node_only = False

        def get_tag_uuid(tag_uuid):
            # Reuse the caller's uuid when given, otherwise mint a fresh one per call.
            if tag_uuid:
                return tag_uuid
            else:
                return str(uuid.uuid4())

        def tag_node_position(node_to_check, start, end, node_data, tag_uuid, offset=0, value=None):
            # Walk the node's content parts, translating the (start, end) span into
            # per-part offsets; recurses into children referenced by int parts.
            # Returns the consumed content length, or -1 once the span is fully tagged.
            content_length = 0
            original_start = start
            original_end = end
            for part_idx, part in enumerate(node_to_check.get_content_parts()):
                if isinstance(part, str):
                    if len(part) > 0:
                        # It is just content
                        part_length = len(part)
                        if part_idx > 0:
                            # Account for the separator that joins consecutive parts.
                            end = end - len(separator)
                            content_length = content_length + len(separator)
                            offset = offset + len(separator)
                            start = 0 if start - len(separator) < 0 else start - len(separator)
                        if start < part_length and end < part_length:
                            # Span ends inside this part: tag and stop.
                            node_to_check.add_feature('tag', tag_to_apply,
                                                      Tag(original_start, original_end,
                                                          part[start:end] if value is None else value,
                                                          data=node_data, uuid=tag_uuid, confidence=confidence,
                                                          index=index))
                            return -1
                        elif start < part_length <= end:
                            # Span starts here but continues into later parts.
                            node_to_check.add_feature('tag', tag_to_apply,
                                                      Tag(original_start,
                                                          content_length + part_length,
                                                          value=part[start:] if value is None else value,
                                                          data=node_data, uuid=tag_uuid, confidence=confidence,
                                                          index=index))
                        end = end - part_length
                        content_length = content_length + part_length
                        offset = offset + part_length
                        start = 0 if start - part_length < 0 else start - part_length
                elif isinstance(part, int):
                    child_node = [child for child in node_to_check.get_children() if child.index == part][0]

                    if part_idx > 0:
                        end = end - len(separator)
                        content_length = content_length + len(separator)
                        offset = offset + len(separator)
                        start = 0 if start - len(separator) < 0 else start - len(separator)

                    result = tag_node_position(child_node, start, end, node_data, tag_uuid,
                                               offset=offset, value=value)

                    if result < 0 or (end - result) <= 0:
                        return -1
                    else:
                        offset = offset + result
                        end = end - result
                        start = 0 if start - result < 0 else start - result
                        content_length = content_length + result
                else:
                    raise Exception("Invalid part?")

            # We need to determine if we have missing children and add them to the end
            for child_idx, child_node in enumerate(node_to_check.get_children()):
                if child_node.index not in node_to_check.get_content_parts():
                    if content_length > 0:
                        end = end - len(separator)
                        content_length = content_length + len(separator)
                        offset = offset + len(separator)
                        start = 0 if start - len(separator) < 0 else start - len(separator)
                    result = tag_node_position(child_node, start, end, node_data, tag_uuid,
                                               offset=offset, value=value)

                    if result < 0 or (end - result) <= 0:
                        return -1
                    else:
                        offset = offset + result
                        end = end - result
                        start = 0 if start - result < 0 else start - result
                        content_length = content_length + result

            # Sanity check: the walked length must match the node's full content length.
            if len(node_to_check.get_all_content(strip=False)) != content_length:
                raise Exception(
                    f"There is a problem in the structure? (2) Length mismatch ({len(node_to_check.get_all_content(strip=False))} != {content_length})")

            return content_length

        if content_re:
            pattern = re.compile(content_re.replace(' ', '\s+') if use_all_content and not node_only else content_re)

        for node in self.select(selector):
            if fixed_position:
                tag_node_position(node, fixed_position[0], fixed_position[1], data, get_tag_uuid(tag_uuid), 0,
                                  value=value)

            else:
                if not content_re:
                    # No pattern: tag the whole node.
                    node.add_feature('tag', tag_to_apply,
                                     Tag(data=data, uuid=get_tag_uuid(tag_uuid), confidence=confidence, value=value,
                                         index=index))
                else:
                    if not use_all_content:
                        if node.content:
                            content = node.content
                        else:
                            content = None
                    else:
                        content = node.get_all_content(separator=separator,
                                                       strip=False) if not node_only else node.get_all_content(
                            separator=separator)

                    if content is not None:
                        if use_match:
                            matches = pattern.finditer(content)

                            if node_only:
                                if any(True for _ in matches):
                                    node.add_feature('tag', tag_to_apply,
                                                     Tag(data=data, uuid=get_tag_uuid(tag_uuid), confidence=confidence,
                                                         value=value, index=index))
                            else:
                                if matches:
                                    for match in matches:
                                        start_offset = match.span()[0]
                                        end_offset = match.span()[1]
                                        tag_node_position(node, start_offset, end_offset, data, get_tag_uuid(tag_uuid),
                                                          value=value)
                        else:
                            search_match = pattern.search(content)
                            if search_match is not None:
                                start_offset = search_match.span()[0]
                                end_offset = search_match.span()[1]
                                tag_node_position(node, start_offset, end_offset, data, get_tag_uuid(tag_uuid),
                                                  value=value)
def get_tags(self):
"""Returns a list of the names of the tags on the given node
:return: A list of the tag name
Args:
Returns:
>>> document.content_node.select('*').get_tags()
['is_cheese']
"""
return [i.name for i in self.get_features_of_type("tag")]
def get_tag_values(self, tag_name, include_children=False):
"""Get the values for a specific tag name
Args:
tag_name: tag name
include_children: include the children of this node (Default value = False)
Returns:
a list of the tag values
"""
values = []
for tag in self.get_tag(tag_name):
values.append(tag['value'])
if include_children:
for child in self.get_children():
values.extend(child.get_tag_values(tag_name, include_children))
return values
def get_related_tag_values(self, tag_name: str, include_children: bool = False, value_separator: str = ' '):
"""Get the values for a specific tag name, grouped by uuid
Args:
tag_name (str): tag name
include_children (bool): include the children of this node
value_separator (str): the string to be used to join related tag values
Returns:
a list of the tag values
"""
def group_tag_values(group_dict, feature_val):
# we know the names of all these tags are the same, but we want to group them if they share the same uuid
if feature_val['uuid'] in value_groups.keys():
# we've seen this UUID - add it's value to the group
group_dict[feature_val['uuid']].append(feature_val['value'])
else:
# first occurrence
group_dict[feature_val['uuid']] = [feature_val['value']]
if include_children:
tagged_nodes = self.select('//*[hasTag("' + tag_name + '")]')
else:
tagged_nodes = self.select('.')
value_groups: Dict[str, Any] = {}
for tag_node in tagged_nodes:
tag_feature_vals = tag_node.get_feature_value('tag', tag_name)
if tag_feature_vals:
if not isinstance(tag_feature_vals, list):
tag_feature_vals = [tag_feature_vals]
for v in tag_feature_vals:
group_tag_values(value_groups, v)
value_strings = []
for k in value_groups.keys():
value_strings.append(value_separator.join(value_groups[k]))
return value_strings
def get_related_tag_nodes(self, tag_name: str, include_children: bool = False):
"""Get the nodes for a specific tag name, grouped by uuid
Args:
tag_name (str): tag name
include_children (bool): include the children of this node
Returns:
a list of the tag content nodes
"""
if include_children:
tagged_nodes = self.select(f'//*[hasTag("{tag_name}")]')
else:
tagged_nodes = [self]
# We need to group these nodes together based on the TAG UUID
node_groups = {}
for tagged_node in tagged_nodes:
tag_instances = tagged_node.get_tag(tag_name)
for tag_instance in tag_instances:
if tag_instance['uuid'] not in node_groups:
node_groups[tag_instance['uuid']] = [tagged_node]
else:
node_groups[tag_instance['uuid']].append(tagged_node)
return node_groups
def get_tag(self, tag_name, tag_uuid=None):
"""Returns the value of a tag (a dictionary), this can be either a single value in a list [[start,end,value]] or if multiple parts of the
content of this node match you can end up with a list of lists i.e. [[start1,end1,value1],[start2,end2,value2]]
Args:
tag_name: The name of the tag
tag_uuid (Optional): Optionally you can also provide the tag UUID
Returns:
A list tagged location and values for this label in this node
>>> document.content_node.select_first('//*[contentRegex(".*Cheese.*")]').get_tag('is_cheese')
[0,10,'The Cheese Moved']
"""
tag_details = self.get_feature_value('tag', tag_name)
if tag_details is None:
return []
if not isinstance(tag_details, list):
tag_details = [tag_details]
final_result = []
for tag_detail in tag_details:
if 'uuid' in tag_detail and tag_uuid:
if tag_detail['uuid'] == tag_uuid:
final_result.append(tag_detail)
else:
final_result.append(tag_detail)
return final_result
def get_all_tags(self):
"""Get the names of all tags that have been applied to this node or to its children.
Args:
Returns:
list[str]: A list of the tag names belonging to this node and/or its children.
>>> document.content_node.select_first('//*[contentRegex(".*Cheese.*")]').get_all_tags()
['is_cheese']
"""
tags = []
tags.extend(self.get_tags())
for child in self.get_children():
tags.extend(child.get_all_tags())
return list(set(tags))
def has_tags(self):
"""Determines if this node has any tags at all.
Args:
Returns:
bool: True if node has any tags; else, False;
>>> document.content_node.select_first('//*[contentRegex(".*Cheese.*")]').has_tags()
True
"""
return len([i.value for i in self.get_features_of_type("tag")]) > 0
def has_tag(self, tag, include_children=False):
"""Determine if this node has a tag with the specified name.
Args:
tag(str): The name of the tag.
include_children(bool): should we include child nodes
Returns:
bool: True if node has a tag by the specified name; else, False;
>>> document.content_node.select_first('//*[contentRegex(".*Cheese.*")]').has_tag('is_cheese')
True
>>> document.content_node.select_first('//*[contentRegex(".*Cheese.*")]').has_tag('is_fish')
False
"""
for feature in self.get_features():
if feature.feature_type == 'tag' and feature.name == tag:
return True
result = False
if include_children:
for child in self.get_children():
if child.has_tag(tag, True):
result = True
return result
def is_first_child(self):
"""Determines if this node is the first child of its parent or has no parent.
Args:
Returns:
bool: True if this node is the first child of its parent or if this node has no parent; else, False;
"""
if not self.parent:
return True
else:
return self.index == 0
def is_last_child(self):
"""Determines if this node is the last child of its parent or has no parent.
Returns:
bool: True if this node is the last child of its parent or if this node has no parent; else, False;
"""
if not self.get_parent():
return True
else:
return self.index == self.get_parent().get_last_child_index()
def get_last_child_index(self):
"""Returns the max index value for the children of this node. If the node has no children, returns None.
Returns:
int or None: The max index of the children of this node, or None if there are no children.
"""
if not self.get_children():
return None
max_index = 0
for child in self.get_children():
if child.index > max_index:
max_index = child.index
return max_index
    def get_node_at_index(self, index):
        """Return the child node at the specified index.

        Documents allow sparse representation: child indexes need not be
        consecutive. When no real child exists at the requested index but the
        index falls inside the range covered by the children, a 'virtual' node is
        synthesized with the node type of its nearest sibling and the requested
        index, but with no features or content. Indexes outside the children's
        range yield None.

        Args:
            index (int): the zero-based index of the child node.

        Returns:
            ContentNode or None: the (possibly virtual) node at the index, or None
            when the index is outside the boundaries of the child nodes.
        """
        if self.get_children():
            if index < self.get_children()[0].index:
                # Requested index precedes the first real child: synthesize a
                # virtual placeholder typed like that first child.
                virtual_node = self.document.create_node(node_type=self.get_children()[0].node_type, virtual=True,
                                                         parent=self,
                                                         index=index)
                return virtual_node
            last_child = None
            for child in self.get_children():
                if child.index < index:
                    last_child = child
                elif child.index == index:
                    return child
                else:
                    break
            if last_child:
                if last_child.index != index and index < self.get_children()[-1].index:
                    # Index falls in a gap between real children: synthesize a
                    # virtual node typed like the nearest preceding sibling.
                    virtual_node = self.document.create_node(node_type=last_child.node_type, virtual=True, parent=self,
                                                             index=index)
                    return virtual_node
                # NOTE(review): implicitly returns None here when the index is at
                # or beyond the last child's index.
            else:
                return None
        else:
            return None
def has_next_node(self, node_type_re=".*", skip_virtual=False):
"""Determine if this node has a next sibling that matches the type specified by the node_type_re regex.
Args:
node_type_re(str, optional, optional): The regular expression to match against the next sibling node's type; default is '.*'.
skip_virtual(bool, optional, optional): Skip virtual nodes and return the next real node; default is False.
Returns:
bool: True if there is a next sibling node matching the specified type regex; else, False.
"""
return self.next_node(node_type_re, skip_virtual=skip_virtual) is not None
def has_previous_node(self, node_type_re=".*", skip_virtual=False):
"""Determine if this node has a previous sibling that matches the type specified by the node_type_re regex.
Args:
node_type_re(str, optional, optional): The regular expression to match against the previous sibling node's type; default is '.*'.
skip_virtual(bool, optional, optional): Skip virtual nodes and return the next real node; default is False.
Returns:
bool: True if there is a previous sibling node matching the specified type regex; else, False.
"""
return self.previous_node(node_type_re=node_type_re, skip_virtual=skip_virtual) is not None
def next_node(self, node_type_re='.*', skip_virtual=False, has_no_content=True):
"""Returns the next sibling content node.
Note: This logic relies on node indexes. Documents allow for sparse representation and child nodes may not have consecutive index numbers.
Therefore, the next node might actually be a virtual node that is created to fill a gap in the document. You can skip virtual nodes by setting the
skip_virtual parameter to False.
Args:
node_type_re(str, optional, optional): The regular expression to match against the next sibling node's type; default is '.*'.
skip_virtual(bool, optional, optional): Skip virtual nodes and return the next real node; default is False.
has_no_content(bool, optional, optional): Allow a node that has no content to be returned; default is True.
Returns:
ContentNode or None: The next node or None, if no node exists
"""
search_index = self.index + 1
compiled_node_type_re = re.compile(node_type_re)
while True:
node = self.get_parent().get_node_at_index(search_index) if self.get_parent() else None
if not node:
return node
if compiled_node_type_re.match(node.node_type) and (not skip_virtual or not node.virtual):
if (not has_no_content and node.content) or has_no_content:
return node
search_index += 1
def previous_node(self, node_type_re='.*', skip_virtual=False, has_no_content=False, traverse=Traverse.SIBLING):
"""Returns the previous sibling content node.
Note: This logic relies on node indexes. Documents allow for sparse representation and child nodes may not have consecutive index numbers.
Therefore, the previous node might actually be a virtual node that is created to fill a gap in the document. You can skip virtual nodes by setting the
skip_virtual parameter to False.
Args:
node_type_re(str, optional, optional): The regular expression to match against the previous node's type; default is '.*'.
skip_virtual(bool, optional, optional): Skip virtual nodes and return the next real node; default is False.
has_no_content(bool, optional, optional): Allow a node that has no content to be returned; default is False.
traverse(Traverse(enum), optional, optional): The transition you'd like to traverse (SIBLING, CHILDREN, PARENT, or ALL); default is Traverse.SIBLING.
Returns:
ContentNode or None: The previous node or None, if no node exists
"""
# TODO: implement/differentiate traverse logic for CHILDREN and SIBLING
if self.index == 0:
if traverse == traverse.ALL or traverse == traverse.PARENT and self.get_parent():
# Lets look for a previous node on the parent
return self.get_parent().previous_node(node_type_re, skip_virtual, has_no_content, traverse)
else:
return None
search_index = self.index - 1
compiled_node_type_re = re.compile(node_type_re)
while True:
node = self.get_parent().get_node_at_index(search_index)
if not node:
return node
if compiled_node_type_re.match(node.node_type) and (not skip_virtual or not node.virtual):
if (not has_no_content) or (has_no_content and not node.content):
return node
search_index -= 1
class ContentFeature(object):
    """Arbitrary data or metadata attached to a ContentNode, identified by a type and a name."""

    def __init__(self, feature_type: str, name: str, value: Any, single: bool = True):
        # Logical grouping for related features (e.g. 'spatial', 'tag')
        self.feature_type: str = feature_type
        # The feature's name within its type (e.g. 'bbox')
        self.name: str = name
        # The data payload carried by the feature
        self.value: Any = value
        # True while the feature holds a single data element; adding the same
        # feature to a node repeatedly accumulates values and flips this to False
        self.single: bool = single

    def __str__(self):
        return f"Feature [type='{self.feature_type}' name='{self.name}' value='{self.value}' single='{self.single}']"

    def to_dict(self):
        """Create a dictionary representing this ContentFeature's structure and content.

        Returns:
            dict: the properties of this ContentFeature as a dictionary.

        >>> node.to_dict()
        """
        qualified_name = self.feature_type + ':' + self.name
        return {'name': qualified_name, 'value': self.value, 'single': self.single}

    def get_value(self):
        """Return the feature data, unwrapping the list when the feature is single-valued.

        Returns:
            the value of the feature
        """
        return self.value[0] if self.single else self.value
@dataclasses.dataclass()
class SourceMetadata:
    """Tracks the provenance of a document: where it came from, its checksum,
    timestamps, and its lineage relative to other documents."""
    original_filename: Optional[str] = None
    original_path: Optional[str] = None
    checksum: Optional[str] = None

    # The ID used for internal caching
    cid: Optional[str] = None
    last_modified: Optional[str] = None
    created: Optional[str] = None
    connector: Optional[str] = None
    mime_type: Optional[str] = None
    headers: Optional[Dict] = None

    # The UUID of the document that this document was derived from,
    # noting that multiple documents can come from one original source
    lineage_document_uuid: Optional[str] = None

    # The UUID of the original first document
    source_document_uuid: Optional[str] = None

    # The UUID of the document in a PDF form (used for archiving and preview)
    pdf_document_uuid: Optional[str] = None

    @classmethod
    def from_dict(cls, env):
        """Build a SourceMetadata from a dictionary, silently ignoring unknown keys.

        Args:
            env: dictionary of field name -> value

        Returns:
            SourceMetadata: the populated instance
        """
        known_fields = set(inspect.signature(cls).parameters)
        filtered = {k: v for k, v in env.items() if k in known_fields}
        return cls(**filtered)
class ContentClassification(object):
    """Document-level classification metadata: a label plus optional taxonomy,
    selector and confidence."""

    def __init__(self, label: str, taxonomy: Optional[str] = None, selector: Optional[str] = None,
                 confidence: Optional[float] = None):
        self.label = label
        self.taxonomy = taxonomy
        self.selector = selector
        self.confidence = confidence

    def to_dict(self):
        """Serialize this classification to a plain dictionary."""
        return {
            "label": self.label,
            "taxonomy": self.taxonomy,
            "selector": self.selector,
            "confidence": self.confidence,
        }

    @classmethod
    def from_dict(cls, dict_val):
        """Deserialize a classification from a dictionary (as produced by to_dict)."""
        return cls(label=dict_val['label'],
                   taxonomy=dict_val.get('taxonomy'),
                   selector=dict_val.get('selector'),
                   confidence=dict_val.get('confidence'))
class Document(object):
"""A Document is a collection of metadata and a set of content nodes."""
PREVIOUS_VERSION: str = "1.0.0"
CURRENT_VERSION: str = "4.0.1"
    def __str__(self):
        # Documents are identified externally by a kodexa:// URI built from their UUID
        return f"kodexa://{self.uuid}"
    def __init__(self, metadata=None, content_node: ContentNode = None, source=None, ref: str = None,
                 kddb_path: str = None, delete_on_close=False):
        """Create a new Document.

        Args:
            metadata (DocumentMetadata, optional): document metadata; a fresh
                DocumentMetadata is created when omitted.
            content_node (ContentNode, optional): the root content node.
            source (SourceMetadata, optional): provenance metadata; a fresh
                SourceMetadata is created when omitted.
            ref (str, optional): reference to the remote store the document was
                initialized from (kept in memory only, not persisted).
            kddb_path (str, optional): path to a KDDB file backing the document.
            delete_on_close (bool): delete the backing KDDB file when the
                document is closed.
        """
        if metadata is None:
            metadata = DocumentMetadata()
        if source is None:
            source = SourceMetadata()
        # Mix-ins are going away - so we will allow people to turn them off as needed
        self.disable_mixin_methods = True
        self.delete_on_close = delete_on_close

        # The ref is not stored and is used when we have
        # initialized a document from a remote store and want
        # to keep track of that
        self.ref = ref

        self.metadata: DocumentMetadata = metadata
        """Metadata relating to the document"""
        self._content_node: Optional[ContentNode] = content_node
        """The root content node"""
        self.virtual: bool = False
        """Is the document virtual (deprecated)"""
        self._mixins: List[str] = []
        """A list of the mixins for this document"""
        self.uuid: str = str(uuid.uuid4())
        """The UUID of this document"""
        self.exceptions: List = []
        """A list of the exceptions on this document (deprecated)"""
        self.log: List[str] = []
        """A log for this document (deprecated)"""
        self.version = Document.CURRENT_VERSION
        """The version of the document"""
        self.source: SourceMetadata = source
        """Source metadata for this document"""
        self.labels: List[str] = []
        """A list of the document level labels for the document"""
        self.taxonomies: List[str] = []
        """A list of the taxonomy references for this document"""
        self.classes: List[ContentClassification] = []
        """A list of the content classifications associated at the document level"""
        self.add_mixin('core')

        # Start persistence layer
        from kodexa.model import PersistenceManager

        self._persistence_layer: Optional[PersistenceManager] = PersistenceManager(document=self,
                                                                                   filename=kddb_path,
                                                                                   delete_on_close=delete_on_close)
        self._persistence_layer.initialize()
    def get_persistence(self):
        """Return the persistence layer backing this document."""
        return self._persistence_layer
    @property
    def content_node(self):
        """ContentNode: The root content node of the document (may be None)."""
        return self._content_node
@content_node.setter
def content_node(self, value):
value.index = 0
if value != self._content_node and self._content_node is not None:
self.get_persistence().remove_content_node(self._content_node)
self._content_node = value
if value is not None:
self.get_persistence().add_content_node(self._content_node, None)
def add_classification(self, label: str, taxonomy_ref: Optional[str] = None) -> ContentClassification:
"""Add a content classification to the document
Args:
label(str): the label
taxonomy_ref(Optional[str]): the reference to the taxonomy
Returns:
the content classification created (or the matching one if it is already on the document)
"""
content_classification = ContentClassification(label, taxonomy=taxonomy_ref)
for existing_class in self.classes:
if existing_class.label == content_classification.label:
return existing_class
self.classes.append(content_classification)
return content_classification
def add_label(self, label: str):
"""Add a label to the document
Args:
label: str Label to add
label: str:
Returns:
the document
"""
if label not in self.labels:
self.labels.append(label)
return self
    def remove_label(self, label: str):
        """Remove a label from the document.

        Args:
            label (str): the label to remove

        Returns:
            Document: this document (for chaining)

        Raises:
            ValueError: if the label is not present on the document
        """
        self.labels.remove(label)
        return self
@classmethod
def from_text(cls, text, separator=None):
"""Creates a new Document from the text provided.
Args:
text: str Text to be used as content on the Document's ContentNode(s)
separator: str If provided, this string will be used to split the text and the resulting text will be placed on children of the root ContentNode. (Default value = None)
Returns:
the document
"""
new_document = Document()
new_document.source.original_filename = f'text-{uuid.uuid4()}'
new_document.content_node = new_document.create_node(node_type='text', index=0)
if text:
if separator:
for s in text.split(separator):
new_document.content_node.add_child(new_document.create_node(node_type='text', content=s))
else:
new_document.content_node.content = text
new_document.add_mixin('text')
return new_document
    def get_root(self):
        """Return the root content node for the document (same as content_node)."""
        return self.content_node
def to_kdxa(self, file_path: str):
"""Write the document to the kdxa format (msgpack) which can be
used with the Kodexa platform
Args:
file_path: the path to the mdoc you wish to create
file_path: str:
Returns:
>>> document.to_mdoc('my-document.kdxa')
"""
with open(file_path, 'wb') as outfile:
msgpack.pack(self.to_dict(), outfile, use_bin_type=True)
    @staticmethod
    def open_kddb(file_path):
        """Open a Kodexa Document Database (KDDB) file.

        This is the Kodexa V4 default way to store documents; it provides high
        performance and the ability to handle very large document objects.

        :param file_path: the path to the KDDB file
        :return: the Document instance backed by that file
        """
        return Document(kddb_path=file_path)
    def close(self):
        """Close the document and release the resources held by its persistence layer."""
        self.get_persistence().close()
def to_kddb(self, path=None):
"""
Either write this document to a KDDB file or convert this document object structure into a KDDB and return a bytes-like object
This is dependent on whether you provide a path to write to
"""
if path is None:
return self.get_persistence().get_bytes()
else:
with open(path, 'wb') as output_file:
output_file.write(self.get_persistence().get_bytes())
@staticmethod
def from_kdxa(file_path):
"""Read an .kdxa file from the given file_path and
Args:
file_path: the path to the mdoc file
Returns:
>>> document = Document.from_kdxa('my-document.kdxa')
"""
with open(file_path, 'rb') as data_file:
data_loaded = msgpack.unpack(data_file, raw=False)
return Document.from_dict(data_loaded)
    def to_msgpack(self):
        """Serialize this document's dictionary form to msgpack bytes."""
        return msgpack.packb(self.to_dict(), use_bin_type=True)
    def to_json(self):
        """Create a JSON string representation of this Document.

        Returns:
            str: The JSON formatted string representation of this Document.

        >>> document.to_json()
        """
        # ensure_ascii=False keeps non-ASCII content readable in the output
        return json.dumps(self.to_dict(), ensure_ascii=False)
def to_dict(self):
"""Create a dictionary representing this Document's structure and content.
Args:
Returns:
dict: A dictionary representation of this Document.
>>> document.to_dict()
"""
# We don't want to store the none values
def clean_none_values(d):
"""
Args:
d:
Returns:
"""
clean = {}
for k, v in d.items():
if isinstance(v, dict):
nested = clean_none_values(v)
if len(nested.keys()) > 0:
clean[k] = nested
elif v is not None:
clean[k] = v
return clean
return {'version': Document.CURRENT_VERSION, 'metadata': self.metadata,
'content_node': self.content_node.to_dict() if self.content_node else None,
'source': clean_none_values(dataclasses.asdict(self.source)),
'mixins': self._mixins,
'taxonomies': self.taxonomies,
'classes': [content_class.to_dict() for content_class in self.classes],
'exceptions': self.exceptions,
'log': self.log,
'labels': self.labels,
'uuid': self.uuid}
@staticmethod
def from_dict(doc_dict):
"""Build a new Document from a dictionary.
Args:
dict: doc_dict: A dictionary representation of a Kodexa Document.
doc_dict:
Returns:
Document: A complete Kodexa Document
>>> Document.from_dict(doc_dict)
"""
new_document = Document(DocumentMetadata(doc_dict['metadata']))
for mixin in doc_dict['mixins']:
registry.add_mixin_to_document(mixin, new_document)
new_document.version = doc_dict['version'] if 'version' in doc_dict and doc_dict[
'version'] else Document.PREVIOUS_VERSION # some older docs don't have a version or it's None
new_document.log = doc_dict['log'] if 'log' in doc_dict else []
new_document.exceptions = doc_dict['exceptions'] if 'exceptions' in doc_dict else []
new_document.uuid = doc_dict['uuid'] if 'uuid' in doc_dict else str(
uuid.uuid5(uuid.NAMESPACE_DNS, 'kodexa.com'))
if 'content_node' in doc_dict and doc_dict['content_node']:
new_document.content_node = ContentNode.from_dict(new_document, doc_dict['content_node'])
if 'source' in doc_dict and doc_dict['source']:
new_document.source = SourceMetadata.from_dict(doc_dict['source'])
if 'labels' in doc_dict and doc_dict['labels']:
new_document.labels = doc_dict['labels']
if 'taxomomies' in doc_dict and doc_dict['taxomomies']:
new_document.labels = doc_dict['taxomomies']
if 'classes' in doc_dict and doc_dict['classes']:
new_document.classes = [ContentClassification.from_dict(content_class) for content_class in
doc_dict['classes']]
new_document.get_persistence().update_metadata()
return new_document
    @staticmethod
    def from_json(json_string):
        """Create a Document from a JSON string.

        Args:
            json_string (str): a JSON string representation of a Kodexa Document

        Returns:
            Document: a complete Kodexa Document

        >>> Document.from_json(json_string)
        """
        return Document.from_dict(json.loads(json_string))
    @staticmethod
    def from_msgpack(bytes):
        """Create a Document from a message pack byte array.

        Args:
            bytes (bytes): a message pack byte array
                (note: the parameter shadows the builtin ``bytes``)

        Returns:
            Document: a complete Kodexa Document

        >>> Document.from_msgpack(open(os.path.join('news-doc.kdxa'), 'rb').read())
        """
        return Document.from_dict(msgpack.unpackb(bytes, raw=False))
    def get_mixins(self):
        """Return the list of mixins that have been enabled on this document."""
        return self._mixins
    def add_mixin(self, mixin):
        """Add the given mixin to this document.

        The mixin is applied to all existing content nodes and registered with the
        document so that future invocations of create_node also apply it.

        Args:
            mixin: the name of the mixin to add

        >>> document.add_mixin('spatial')
        """
        registry.add_mixin_to_document(mixin, self)
    def create_node(self, node_type: str, content: Optional[str] = None, virtual: bool = False,
                    parent: ContentNode = None,
                    index: Optional[int] = None):
        """
        Creates a new node for the document. Any mixins that have been applied to
        the document will also be available on the new node. When a parent is
        given the node is attached to it; otherwise it is registered directly with
        the persistence layer as an unattached node.

        Args:
            node_type (str): The type of node.
            content (str): The content for the node; defaults to None.
            virtual (bool): Indicates if this is a 'real' or 'virtual' node; default is False. 'Real' nodes contain
                            document content. 'Virtual' nodes are synthesized as necessary to fill gaps in between
                            non-consecutively indexed siblings. Such indexing arises when document content is sparse.
            parent (ContentNode): The parent for this newly created node; default is None.
            index (Optional[int]): The index property to be set on this node; default is None.

        Returns:
            ContentNode: This newly created node.

        >>> document.create_node(node_type='page')
            <kodexa.model.model.ContentNode object at 0x7f80605e53c8>
        """
        content_node = ContentNode(document=self, node_type=node_type, content=content,
                                   parent=parent, index=index, virtual=virtual)
        if parent is not None:
            parent.add_child(content_node, index)
        else:
            # Unattached nodes are still registered with the persistence layer
            self.get_persistence().add_content_node(content_node, None)

        if content is not None and len(content_node.get_content_parts()) == 0:
            content_node.set_content_parts([content])

        return content_node
@classmethod
def from_kddb(cls, input, detached: bool = False):
"""
Loads a document from a Kodexa Document Database (KDDB) file
Args:
input: if a string we will load the file at that path, if bytes we will create a temp file and
load the KDDB to it
detached (bool): if reading from a file we will create a copy so we don't update in place
:return: the document
"""
if isinstance(input, str):
if isinstance(input, str):
document = Document(kddb_path=input)
if detached:
return Document.from_kddb(document.to_kddb())
else:
return document
else:
# We will assume the input is of byte type
import tempfile
fp = tempfile.NamedTemporaryFile(suffix='.kddb', delete=False)
fp.write(input)
fp.close()
return Document(kddb_path=fp.name, delete_on_close=True)
@classmethod
def from_file(cls, file, unpack: bool = False):
"""Creates a Document that has a 'file-handle' connector to the specified file.
Args:
file: file: The file to which the new Document is connected.
unpack: bool: (Default value = False)
Returns:
Document: A Document connected to the specified file.
"""
if unpack:
Document.from_kdxa(file)
else:
file_document = Document()
file_document.metadata.connector = 'file-handle'
file_document.metadata.connector_options.file = file
file_document.source.connector = 'file-handle'
file_document.source.original_filename = os.path.basename(file)
file_document.source.original_path = file
return file_document
@classmethod
def from_url(cls, url, headers=None):
"""Creates a Document that has a 'url' connector for the specified url.
Args:
str: url: The URL to which the new Document is connected.
dict: headers: Headers that should be used when reading from the URL
url:
headers: (Default value = None)
Returns:
Document: A Document connected to the specified URL with the specified headers (if any).
"""
if headers is None:
headers = {}
url_document = Document()
url_document.metadata.connector = 'url'
url_document.metadata.connector_options.url = url
url_document.metadata.connector_options.headers = headers
url_document.source.connector = 'url'
url_document.source.original_filename = url
url_document.source.original_path = url
url_document.source.headers = headers
return url_document
def select_first(self, selector, variables=None) -> Optional[ContentNode]:
"""Select and return the first child of this node that match the selector value.
Args:
selector (str): The selector (ie. //*)
variables (dict, optional): A dictionary of variable name/value to use in substituion; defaults to None. Dictionary keys should match a variable specified in the selector.
Returns:
Optional[ContentNode]: The first matching node or none
>>> document.get_root().select_first('.')
ContentNode
>>> document.get_root().select_first('//*[hasTag($tagName)]', {"tagName": "div"})
ContentNode
"""
result = self.select(selector, variables)
return result[0] if len(result) > 0 else None
def select(self, selector: str, variables: Optional[dict] = None) -> List[ContentNode]:
"""Execute a selector on the root node and then return a list of the matching nodes.
Args:
selector (str): The selector (ie. //*)
variables (Optional[dict): A dictionary of variable name/value to use in substituion; defaults to an empty dictionary. Dictionary keys should match a variable specified in the selector.
Returns:
list[ContentNodes]: A list of the matching ContentNodes. If no matches found, list is empty.
>>> document.select('.')
[ContentNode]
"""
if variables is None:
variables = {}
if self.content_node:
result = self.content_node.select(selector, variables)
if isinstance(result, list):
return result
else:
return [self.content_node] if bool(result) else []
else:
return []
    def get_labels(self) -> List[str]:
        """Return the document-level labels.

        Returns:
            List[str]: the list of labels associated with this document
        """
        return self.labels
class DocumentStore(Store):
"""
A document store supports storing, listing and retrieving Kodexa documents and document families
"""
    @abc.abstractmethod
    def get_ref(self) -> str:
        """Return the reference of the document store.

        Returns:
            str: the store reference in ``org-slug/store-slug:version`` form
            (e.g. ``myorg/myslug:1.0.0``)
        """
        raise NotImplementedError
    @abc.abstractmethod
    def get_by_content_object_id(self, document_family: DocumentFamily, content_object_id: str) -> Optional[Document]:
        """Get a Document based on the ID of the ContentObject.

        Args:
            document_family (DocumentFamily): the document family
            content_object_id (str): the ID of the ContentObject

        Returns:
            Optional[Document]: the document, or None if not found
        """
        raise NotImplementedError
    @abc.abstractmethod
    def replace_content_object(self, document_family: DocumentFamily, content_object_id: str,
                               document: Document) -> Optional[DocumentFamily]:
        """Replace the document held in a specific content object of a document family.

        Args:
            document_family (DocumentFamily): the document family
            content_object_id (str): the ID of the ContentObject
            document (Document): the document to replace the content object with

        Returns:
            Optional[DocumentFamily]: the document family, or None if it wasn't found
        """
        raise NotImplementedError
    @abc.abstractmethod
    def put_native(self, path: str, content):
        """Push native content directly into the store.

        This creates both a native object in the store and a related Document
        that refers to it.

        :param path: the path where the native content should be stored
        :param content: the binary content for the native file
        :return: None
        """
        raise NotImplementedError
    @abc.abstractmethod
    def get_family(self, document_family_id: str) -> Optional[DocumentFamily]:
        """Return a document family based on its ID.

        Args:
            document_family_id (str): the ID of the document family

        Returns:
            Optional[DocumentFamily]: the document family, or None if not found
        """
        raise NotImplementedError
    @abc.abstractmethod
    def delete(self, path: str):
        """Delete the document family stored at the given path.

        Args:
            path: the path to the content (e.g. mymodel.dat)

        Returns:
            bool: True if deleted, False if there was no file at the path
        """
        raise NotImplementedError
    @abc.abstractmethod
    def add_related_document_to_family(self, document_family_id: str, transition: DocumentTransition,
                                      document: Document):
        """Add a document to a family as a new transition.

        Args:
            document_family_id (str): the ID of the document family
            transition (DocumentTransition): the document transition
            document (Document): the document to add

        Returns:
            None
        """
        raise NotImplementedError
@abc.abstractmethod
def get_document_by_content_object(self, document_family: DocumentFamily, content_object: ContentObject) -> \
        Optional[Document]:
    """Get the document for a given content object.

    Args:
        document_family (DocumentFamily): the document family
        content_object (ContentObject): the content object

    Returns:
        the Document (or None if not found)
    """
    raise NotImplementedError
<sig>@abc.abstractmethod</sig>
@abc.abstractmethod
def get_source_by_content_object(self, document_family: DocumentFamily, content_object: ContentObject) -> \
        Any:
    """Get the (native) source for a given content object.

    Args:
        document_family (DocumentFamily): the document family
        content_object (ContentObject): the content object

    Returns:
        the source (or None if not found)
    """
    raise NotImplementedError
def query(self, query: str = "*"):
    """Query the document families and render the matches as a table.

    Convenience wrapper around :meth:`query_families` intended for
    interactive use (prints a rich table, returns nothing).

    Args:
        query (str): The query (Default value = "*")
    """
    families = self.query_families(query)
    self._draw_table(families)
@abc.abstractmethod
def register_listener(self, listener):
    """Register a listener to this store.

    A store listener must have the method
    ``process_event(content_event: ContentEvent)``.

    Args:
        listener: the listener to register

    Returns:
        None
    """
    raise NotImplementedError
def _draw_table(self, objects):
    """Internal helper that prints the given objects as a rich table.

    Args:
        objects: an iterable of dict-like objects; the columns
            ``id``, ``content_type`` and ``path`` are shown when present

    Returns:
        None
    """
    from rich.table import Table
    from rich import print
    table = Table(title="Listing Objects")
    cols = ['id', 'content_type', 'path']
    for col in cols:
        table.add_column(col)
    for object_dict in objects:
        row = []
        for col in cols:
            # rich's Table.add_row only accepts strings/renderables, so
            # coerce non-string values (e.g. numeric ids) explicitly.
            row.append(str(object_dict[col]) if col in object_dict else '')
        table.add_row(*row)
    print(table)
@abc.abstractmethod
def query_families(self, query: str = "*", page: int = 1, page_size: int = 100) -> List[DocumentFamily]:
    """Query the document families.

    Args:
        query (str): The query (default is ``*``)
        page (int): The page number (1-based)
        page_size (int): The page size

    Returns:
        A list of matching document families
    """
    raise NotImplementedError
@abc.abstractmethod
def put(self, path: str, document: Document) -> DocumentFamily:
    """Put a new document in the store at the given path.

    There must not already be a family at the path; this method creates
    a new family based around the document.

    Args:
        path (str): the path at which to add the document in the store
        document (Document): the document

    Returns:
        A new document family
    """
    raise NotImplementedError
@abc.abstractmethod
def get_family_by_path(self, path: str) -> Optional[DocumentFamily]:
    """Return the document family for a specific path in the store.

    Args:
        path (str): the path within the store

    Returns:
        The document family, or None if no family exists at that path
    """
    raise NotImplementedError
@abc.abstractmethod
def count(self) -> int:
    """Return the number of document families in the store.

    Returns:
        the count of families
    """
    raise NotImplementedError
def accept(self, document: Document):
    """Decide whether the store will accept this document.

    Concrete stores typically override this to reject a document when a
    family already exists at the derived family path; this base
    implementation accepts everything.

    Args:
        document (Document): the document to check

    Returns:
        True if the document may be stored, False otherwise (always
        True in this default implementation)
    """
    return True
def get_latest_document_in_family(self, document_family: DocumentFamily) -> Optional[Document]:
    """Return the most recently stored document of the family.

    Args:
        document_family (DocumentFamily): The document family from which
            we want the latest document

    Returns:
        The last document stored in the family, or None if the family
        has no content objects or the document could not be loaded
    """
    # Guard: an empty family previously raised IndexError on [-1],
    # contradicting the documented "None if there isn't one" contract.
    if not document_family.content_objects:
        return None
    last_co = document_family.content_objects[-1]
    document = self.get_document_by_content_object(document_family, last_co)
    if document is not None:
        # Stamp the fully-qualified reference (store-ref/family-id/content-object-id).
        document.ref = f"{self.get_ref()}/{document_family.id}/{last_co.id}"
    return document
class ModelStore(Store):
    """A model store supports storing and retrieving ML models."""

    def get(self, path: str):
        """Return the bytes object for the given path.

        Args:
            path (str): the path to get content from

        Returns:
            Bytes, or None if there is nothing at the path
        """
        pass

    def put(self, path: str, content: Any, replace=False) -> DocumentFamily:
        """Store content at the given path.

        Args:
            path (str): The path to put the content at
            content: The content to put in the store
            replace: Replace the object if it already exists

        Returns:
            The document family that was created
        """
        pass

    def set_content_metadata(self, model_content_metadata: ModelContentMetadata):
        """Update the model content metadata for the model store.

        Args:
            model_content_metadata: The metadata object
        """
        pass

    def get_content_metadata(self) -> ModelContentMetadata:
        """Get the latest model content metadata for the model store.

        Returns:
            the model content metadata
        """
        pass

    def list_contents(self) -> List[str]:
        """List the objects that have been uploaded into this model store.

        Returns:
            a list of the object names
        """
        pass
class ContentObjectReference:
    """A bundle tying a content object to its store, document and family."""

    def __init__(self, content_object: ContentObject, store: DocumentStore, document: Document,
                 document_family):
        # The content object being referenced.
        self.content_object = content_object
        # The document store that holds the content object.
        self.store = store
        # The parsed Kodexa document for the content object.
        self.document = document
        # Imported locally to avoid a circular import with kodexa.model.
        from kodexa.model import DocumentFamily
        self.document_family: DocumentFamily = document_family
|
NoctisGames/insectoid-defense_opensource | src/core/framework/ui/3ds/DSRectangleBatcher.cpp | <reponame>NoctisGames/insectoid-defense_opensource
//
// DSRectangleBatcher.cpp
// gowengamedev-framework
//
// Created by <NAME> on 8/25/15.
// Copyright (c) 2015 Gowen Game Dev. All rights reserved.
//
#include "DSRectangleBatcher.h"
#include "macros.h"
#include "Rectangle.h"
#include "Vector2D.h"
#include "GameConstants.h"
#include "DummyGpuProgramWrapper.h"
#include <sf2d.h>
#define RECT_LINE_THICKNESS GAME_WIDTH / 800.0f
// Constructs a batcher for the given 3DS screen; m_isFill selects filled
// rectangles vs. outlines when the batch is flushed.
DSRectangleBatcher::DSRectangleBatcher(gfxScreen_t screen, int screenWidth, int screenHeight, bool isFill) : RectangleBatcher(isFill), m_screen(screen), m_iScreenWidth(screenWidth), m_iScreenHeight(screenHeight)
{
    m_iNumRectangles = 0;
}
// Resets the pending rectangle list so a new batch can be recorded.
void DSRectangleBatcher::beginBatch()
{
    m_rects.clear();
    m_iNumRectangles = 0;
}
// Flushes the batch using the no-op GPU program wrapper (sf2d handles the
// actual GPU state on 3DS, so no shader program is needed).
void DSRectangleBatcher::endBatch()
{
    endBatch(*DummyGpuProgramWrapper::getInstance());
}
// Flushes all recorded rectangles via sf2d. The gpuProgramWrapper argument is
// unused on this platform (kept for interface compatibility with the base
// class). Filled mode draws solid rectangles; outline mode draws the four
// edges as lines of RECT_LINE_THICKNESS.
void DSRectangleBatcher::endBatch(GpuProgramWrapper &gpuProgramWrapper)
{
    if (m_iNumRectangles > 0)
    {
        for (std::vector<RECT>::iterator itr = m_rects.begin(); itr != m_rects.end(); ++itr)
        {
            const RECT &r = *itr;
            // Pack the per-rectangle color once instead of re-evaluating the
            // RGBA8 conversion for every draw call.
            const u32 color = RGBA8((int) (r.r * 255), (int) (r.g * 255), (int) (r.b * 255), (int) (r.a * 255));

            if (m_isFill)
            {
                float width = r.x2 - r.x1;
                float height = r.y2 - r.y1;
                // NOTE(review): x/y are the rectangle's center here — assumes
                // sf2d_draw_rectangle is called with the intended anchor; this
                // preserves the original behavior.
                float x = r.x1 + width / 2;
                float y = r.y1 + height / 2;
                sf2d_draw_rectangle(x, y, width, height, color);
            }
            else
            {
                // Outline: top, right, bottom, left edges.
                sf2d_draw_line(r.x1, r.y1, r.x2, r.y1, RECT_LINE_THICKNESS, color);
                sf2d_draw_line(r.x2, r.y1, r.x2, r.y2, RECT_LINE_THICKNESS, color);
                sf2d_draw_line(r.x2, r.y2, r.x1, r.y2, RECT_LINE_THICKNESS, color);
                sf2d_draw_line(r.x1, r.y2, r.x1, r.y1, RECT_LINE_THICKNESS, color);
            }
        }
    }
}
// Records a rectangle for the current batch. The y coordinates are flipped
// against GAME_HEIGHT because the game's world space is y-up while sf2d's
// screen space is y-down.
void DSRectangleBatcher::renderRectangle(float x1, float y1, float x2, float y2, Color &c)
{
    RECT r = {x1, GAME_HEIGHT - y1, x2, GAME_HEIGHT - y2, c.red, c.green, c.blue, c.alpha};
    m_rects.push_back(r);
    m_iNumRectangles++;
}
miaopei/B4860-test | learn-uv-api/net_uv_new/Examples/Apps/Common/Application/Include/texture/TextureCache.h | #pragma once
#include "imgui.h"
#include <map>
#include <vector>
// Singleton cache of ImGui textures, keyed by texture name. Resolves file
// names against a configurable list of search paths.
class TextureCache
{
    static TextureCache* TextureCacheInstance;
public:
    // Lazily creates / destroys the process-wide singleton.
    static TextureCache* getInstance();
    static void destroy();
    ~TextureCache();
    // Loads (or returns the cached) texture for the given name.
    ImTextureID loadTexture(const std::string& textureName);
    // Release by handle, by name, or everything at once.
    void releaseTexture(ImTextureID textureId);
    void releaseTextureByName(const std::string& textureName);
    void releaseAll();
    // Adds a directory to consult when resolving relative texture names.
    void addSearchPath(const std::string& path);
    // Resolves a (possibly relative) path against the registered search paths.
    std::string getPath(const std::string& path);
protected:
    std::map<std::string, ImTextureID> textureCacheMap;
    std::vector<std::string> searchPaths;
};
|
neo4jrb/neo4j-driver | ruby/neo4j/driver/internal/async/connection/event_loop_group_factory.rb | module Neo4j::Driver
module Internal
module Async
module Connection
class EventLoopGroupFactory
THREAD_NAME_PREFIX = "Neo4jDriverIO"
THREAD_PRIORITY = 10
THREAD_IS_DAEMON = true
class << self
# Get class of {@link Channel} for {@link Bootstrap#channel(Class)} method.
# @return class of the channel, which should be consistent with {@link EventLoopGroup}s returned by
# {@link #newEventLoopGroup(int)}.
def channel_class
# org.neo4j.driver.internal.shaded.io.netty.channel.socket.nio.NioSocketChannel
end
# Create new {@link EventLoopGroup} with specified thread count. Returned group should by given to
# {@link Bootstrap#group(EventLoopGroup)}.
# @param threadCount amount of IO threads for the new group.
# @return new group consistent with channel class returned by {@link #channelClass()}.
def new_event_loop_group(thread_count)
DriverEventLoopGroup.new(thread_count)
end
# Assert that current thread is not an event loop used for async IO operations. This check is needed because
# blocking API methods like {@link Session#run(String)} are implemented on top of corresponding async API methods
# like {@link AsyncSession#runAsync(String)} using basically {@link Future#get()} calls. Deadlocks might happen when IO
# thread executes blocking API call and has to wait for itself to read from the network.
# @throws IllegalStateException when current thread is an event loop IO thread.
def assert_not_in_event_loop_thread
if event_loop_thread?(Thread.current)
raise Exceptions::IllegalStateException, "Blocking operation can't be executed in IO thread because it might result in a deadlock. Please do not use blocking API when chaining futures returned by async API methods."
end
end
# Check if given thread is an event loop IO thread.
# @param thread the thread to check.
# @return {@code true} when given thread belongs to the event loop, {@code false} otherwise.
def event_loop_thread?(thread)
thread.is_a?(DriverThread)
end
end
private
# Same as {@link NioEventLoopGroup} but uses a different {@link ThreadFactory} that produces threads of
# {@link DriverThread} class. Such threads can be recognized by {@link #assertNotInEventLoopThread()}.
class DriverEventLoopGroup #< org.neo4j.driver.internal.shaded.io.netty.channel.nio.NioEventLoopGroup
def initialize(n_threads = nil)
end
protected
def new_default_thread_factory
DriverThreadFactory.new
end
end
# Same as {@link DefaultThreadFactory} created by {@link NioEventLoopGroup} by default, except produces threads of
# {@link DriverThread} class. Such threads can be recognized by {@link #assertNotInEventLoopThread()}.
class DriverThreadFactory #< org.neo4j.driver.internal.shaded.io.netty.util.concurrent.DefaultThreadFactory
def initialize()
super(THREAD_NAME_PREFIX, THREAD_IS_DAEMON, THREAD_PRIORITY)
end
def new_thread(r, name)
DriverThread.new(@thread_group, r, name)
end
end
class DriverThread #< org.neo4j.driver.internal.shaded.io.netty.util.concurrent.FastThreadLocalThread
end
end
end
end
end
end
|
Couso99/PruebasTerapiaOcupacional | app/src/main/java/com/imovil/NEURmetrics/models/Test.java | package com.imovil.NEURmetrics.models;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.io.Serializable;
import java.util.List;
/**
 * GSON-mapped model describing a (possibly nested) test definition, including
 * its parameters, scoring configuration, outputs and timing offsets.
 * A Test may either contain sub-tests ({@code isContainsTests}) or be a leaf
 * test with its own parameters/score/outputs.
 */
public class Test implements Serializable {
    // --- structure: a test can nest further tests ---
    @SerializedName("isContainsTests")
    @Expose
    private boolean isContainsTests;
    @SerializedName("tests")
    @Expose
    private List<Test> tests;
    // --- identity & display metadata ---
    @SerializedName("name")
    @Expose
    private String name;
    @SerializedName("testType")
    @Expose
    private int testType;
    @SerializedName("testID")
    @Expose
    private String testID;
    @SerializedName("title")
    @Expose
    private String title;
    @SerializedName("h1")
    @Expose
    private String h1;
    @SerializedName("h2")
    @Expose
    private String h2;
    @SerializedName("comment")
    @Expose
    private String comment;
    // --- input parameters (parallel lists of type names and values) ---
    @SerializedName("parametersNumber")
    @Expose
    private int parametersNumber;
    @SerializedName("parametersType")
    @Expose
    private List<String> parametersType=null;
    @SerializedName("parameters")
    @Expose
    private List<String> parameters=null;
    // --- scoring configuration ---
    @SerializedName("score")
    @Expose
    private int score;
    @SerializedName("expandedScore")
    @Expose
    private List<Integer> expandedScore=null;
    @SerializedName("maxScore")
    @Expose
    private int maxScore;
    @SerializedName("scoreOptions")
    @Expose
    private List<String> scoreOptions=null;
    @SerializedName("scoreWeights")
    @Expose
    private List<Integer> scoreWeights=null;
    // --- outputs (parallel lists of type names and values) ---
    @SerializedName("outputsNumber")
    @Expose
    private int outputsNumber;
    @SerializedName("outputsType")
    @Expose
    private List<String> outputsType=null;
    @SerializedName("outputs")
    @Expose
    private List<String> outputs=null;
    // --- timing offsets (milliseconds relative to session start, presumably — TODO confirm) ---
    @SerializedName("startTestTimeOffset")
    @Expose
    private long startTestTimeOffset;
    @SerializedName("stopTestTimeOffset")
    @Expose
    private long stopTestTimeOffset;

    // Plain accessors below; no additional logic.
    public boolean isContainsTests() {
        return isContainsTests;
    }
    public void setContainsTests(boolean containsTests) {
        isContainsTests = containsTests;
    }
    public List<Test> getTests() {
        return tests;
    }
    public void setTests(List<Test> tests) {
        this.tests = tests;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public int getTestType() {
        return testType;
    }
    public void setTestType(int testType) {
        this.testType = testType;
    }
    public String getTestID() {
        return testID;
    }
    public void setTestID(String testID) {
        this.testID = testID;
    }
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public String getH1() {
        return h1;
    }
    public void setH1(String h1) {
        this.h1 = h1;
    }
    public String getH2() {
        return h2;
    }
    public void setH2(String h2) {
        this.h2 = h2;
    }
    public String getComment() {
        return comment;
    }
    public void setComment(String comment) {
        this.comment = comment;
    }
    public int getParametersNumber() {
        return parametersNumber;
    }
    public void setParametersNumber(int parametersNumber) {
        this.parametersNumber = parametersNumber;
    }
    public List<String> getParametersType() {
        return parametersType;
    }
    public void setParametersType(List<String> parametersType) {
        this.parametersType = parametersType;
    }
    public List<String> getParameters() {
        return parameters;
    }
    public void setParameters(List<String> parameters) {
        this.parameters = parameters;
    }
    public int getScore() {
        return score;
    }
    public void setScore(int score) {
        this.score = score;
    }
    public List<Integer> getExpandedScore() {
        return expandedScore;
    }
    public void setExpandedScore(List<Integer> expandedScore) {
        this.expandedScore = expandedScore;
    }
    public int getMaxScore() {
        return maxScore;
    }
    public void setMaxScore(int maxScore) {
        this.maxScore = maxScore;
    }
    public List<String> getScoreOptions() {
        return scoreOptions;
    }
    public void setScoreOptions(List<String> scoreOptions) {
        this.scoreOptions = scoreOptions;
    }
    public List<Integer> getScoreWeights() {
        return scoreWeights;
    }
    public void setScoreWeights(List<Integer> scoreWeights) {
        this.scoreWeights = scoreWeights;
    }
    public int getOutputsNumber() {
        return outputsNumber;
    }
    public void setOutputsNumber(int outputsNumber) {
        this.outputsNumber = outputsNumber;
    }
    public List<String> getOutputsType() {
        return outputsType;
    }
    public void setOutputsType(List<String> outputsType) {
        this.outputsType = outputsType;
    }
    public List<String> getOutputs() {
        return outputs;
    }
    public void setOutputs(List<String> outputs) {
        this.outputs = outputs;
    }
    public long getStartTestTimeOffset() {
        return startTestTimeOffset;
    }
    public void setStartTestTimeOffset(long startTestTimeOffset) {
        this.startTestTimeOffset = startTestTimeOffset;
    }
    public long getStopTestTimeOffset() {
        return stopTestTimeOffset;
    }
    public void setStopTestTimeOffset(long stopTestTimeOffset) {
        this.stopTestTimeOffset = stopTestTimeOffset;
    }
}
eduardoxcruz/BinanceFuturesSdk | src/main/java/com/binance/futures/sdk/model/rest_api_response/market/order_book/OrderBook.java | package com.binance.futures.sdk.model.rest_api_response.market.order_book;
import com.binance.futures.sdk.constant.BinanceApiConstants;
import com.binance.futures.sdk.model.ResponseTimestamp;
import org.apache.commons.lang3.builder.ToStringBuilder;
import java.util.List;
/**
 * Response model for the Binance Futures order-book (depth) REST endpoint.
 * Holds the bid and ask levels plus the update id and message output time.
 */
public class OrderBook extends ResponseTimestamp {
    private Long lastUpdateId;
    private Long messageOutputTime;
    // Human-readable form of messageOutputTime, derived in the setter.
    private String parsedMessageOutputTime;
    private List<BidOrAsk> bids;
    private List<BidOrAsk> asks;

    public String getParsedMessageOutputTime() {
        return parsedMessageOutputTime;
    }
    public void setParsedMessageOutputTime(String parsedMessageOutputTime) {
        this.parsedMessageOutputTime = parsedMessageOutputTime;
    }
    public Long getLastUpdateId() {
        return lastUpdateId;
    }
    public void setLastUpdateId(Long lastUpdateId) {
        this.lastUpdateId = lastUpdateId;
    }
    public List<BidOrAsk> getBids() {
        return bids;
    }
    public void setBids(List<BidOrAsk> bids) {
        this.bids = bids;
    }
    public List<BidOrAsk> getAsks() {
        return asks;
    }
    public void setAsks(List<BidOrAsk> asks) {
        this.asks = asks;
    }
    public Long getMessageOutputTime() {
        return messageOutputTime;
    }
    /**
     * Side effect: also refreshes {@code parsedMessageOutputTime} by
     * formatting the supplied epoch timestamp.
     */
    public void setMessageOutputTime(Long messageOutputTime) {
        this.messageOutputTime = messageOutputTime;
        setParsedMessageOutputTime(parseTimestampToDateString(messageOutputTime));
    }
    @Override
    public String toString() {
        return new ToStringBuilder(this, BinanceApiConstants.TO_STRING_BUILDER_STYLE)
                .append("lastUpdateId", getLastUpdateId())
                .append("messageOutputTime", getMessageOutputTime())
                .append("parsedMessageOutputTime", getParsedMessageOutputTime())
                .append("transactionTime", getTransactionTime())
                .append("parsedTransactionTime", getParsedTransactionTime())
                .append("bids", getBids())
                .append("asks", getAsks())
                .toString();
    }
}
|
sunadm/ClickHouse | dbms/src/Parsers/ASTShowCreateAccessEntityQuery.cpp | #include <Parsers/ASTShowCreateAccessEntityQuery.h>
#include <Common/quoteString.h>
namespace DB
{
namespace
{
    using Kind = ASTShowCreateAccessEntityQuery::Kind;

    // Maps the entity kind to the keyword used after SHOW CREATE.
    const char * kindToKeyword(Kind kind)
    {
        switch (kind)
        {
            case Kind::QUOTA: return "QUOTA";
            case Kind::ROW_POLICY: return "POLICY";
        }
        __builtin_unreachable();
    }
}

ASTShowCreateAccessEntityQuery::ASTShowCreateAccessEntityQuery(Kind kind_)
    : kind(kind_), keyword(kindToKeyword(kind_))
{
}

String ASTShowCreateAccessEntityQuery::getID(char) const
{
    return String("SHOW CREATE ") + keyword + " query";
}

ASTPtr ASTShowCreateAccessEntityQuery::clone() const
{
    return std::make_shared<ASTShowCreateAccessEntityQuery>(*this);
}

// Renders the query back to SQL text:
//   SHOW CREATE POLICY <name> ON [db.]table
//   SHOW CREATE QUOTA CURRENT | SHOW CREATE QUOTA <name>
void ASTShowCreateAccessEntityQuery::formatQueryImpl(const FormatSettings & settings, FormatState &, FormatStateStacked) const
{
    settings.ostr << (settings.hilite ? hilite_keyword : "")
                  << "SHOW CREATE " << keyword
                  << (settings.hilite ? hilite_none : "");

    if (kind == Kind::ROW_POLICY)
    {
        const String & database = row_policy_name.database;
        const String & table_name = row_policy_name.table_name;
        const String & policy_name = row_policy_name.policy_name;
        settings.ostr << ' ' << backQuoteIfNeed(policy_name) << (settings.hilite ? hilite_keyword : "") << " ON "
                      << (settings.hilite ? hilite_none : "") << (database.empty() ? String{} : backQuoteIfNeed(database) + ".")
                      << backQuoteIfNeed(table_name);
    }
    else if ((kind == Kind::QUOTA) && current_quota)
        settings.ostr << (settings.hilite ? hilite_keyword : "") << " CURRENT" << (settings.hilite ? hilite_none : "");
    else
        settings.ostr << " " << backQuoteIfNeed(name);
}
}
|
nick-bair/the-doctor | src/util/createFormulas.js | 'use strict';
const {map, find, propEq, mergeAll, curry, pipeP} = require('ramda');
const get = require('./get');
const postFormula = require('./post')('formulas');
const applyVersion = require('../util/applyVersion')
const makePath = formula => `formulas/${formula.id}`;
const update = require('./update');
// Ensures a formula exists on the endpoint: reuses an existing formula with
// the same name, otherwise POSTs it. Resolves to a one-entry map from the
// local formula id to the endpoint formula id.
const createFormula = curry(async (endpointFormulas,formula) => {
  const endpointFormula = find(propEq('name' ,formula.name))(endpointFormulas)
  if(endpointFormula) {
    return { [formula.id]: endpointFormula.id }
  } else {
    const result = await postFormula(formula)
    console.log(`Created Formula: ${formula.name}`)
    return { [formula.id]: result.id }
  }
})
// PUTs the (id-remapped) formula back to the endpoint and logs the update.
const updateFormula = async formula => {
  await update(makePath(formula), formula)
  console.log(`Updated Formula: ${formula.name}`)
}
// Syncs a list of local formulas to the endpoint: creates missing formulas,
// builds a local-id -> endpoint-id map, rewrites formula-step references and
// sub-formula ids against that map, then updates each formula.
module.exports = async (formulas) => {
  const endpointFormulas = await get('formulas')
  let formulaIds = mergeAll(await Promise.all(map(createFormula(endpointFormulas))(formulas)))
  // NOTE(review): for 'formula'-type steps this replaces the entire
  // `properties` object with only { formulaId } — confirm that formula steps
  // never carry additional properties that must be preserved.
  const fixSteps = map(s => s.type === 'formula'? ({ ...s, properties: { formulaId: formulaIds[s.properties.formulaId] } }) : s)
  const newFormulas = map(f => ({
    ...f,
    id: formulaIds[f.id],
    steps: fixSteps(f.steps),
    subFormulas: f.subFormulas ? map(s => ({
      ...s,
      id: formulaIds[s.id],
      steps: fixSteps(s.steps)
    }))(f.subFormulas) : []
  }))(formulas)
  return Promise.all(map(updateFormula)(newFormulas))
}
szpak/micro-infra-spring | swagger/micro-infra-spring-swagger/src/main/java/repackaged/com/mangofactory/swagger/readers/operation/SwaggerParameterReader.java | <filename>swagger/micro-infra-spring-swagger/src/main/java/repackaged/com/mangofactory/swagger/readers/operation/SwaggerParameterReader.java
package repackaged.com.mangofactory.swagger.readers.operation;
import repackaged.com.mangofactory.swagger.scanners.RequestMappingContext;
import com.mangofactory.swagger.models.dto.Parameter;
import java.util.Collection;
import java.util.List;
import static com.google.common.collect.Lists.*;
/**
 * Template base for readers that contribute Swagger {@link Parameter}s for a
 * request mapping. Accumulates the parameters produced by
 * {@link #readParameters} into the shared "parameters" list on the context.
 */
public abstract class SwaggerParameterReader implements RequestMappingReader {

    @Override
    public final void execute(RequestMappingContext context) {
        // Context values are untyped; the "parameters" entry is by convention
        // a List<Parameter> populated by earlier readers (null on first use).
        List<Parameter> parameters = (List<Parameter>) context.get("parameters");
        if (parameters == null) {
            parameters = newArrayList();
        }
        parameters.addAll(this.readParameters(context));
        context.put("parameters", parameters);
    }

    /** Produces the parameters this reader contributes for the mapping. */
    abstract protected Collection<? extends Parameter> readParameters(RequestMappingContext context);
}
|
yangwei258/GAF | gaf-cloud/gaf-commons/gaf-common-api-scanner/src/main/java/com/supermap/gaf/api/scanner/enums/ComponentTypeEnum.java | <gh_stars>10-100
package com.supermap.gaf.api.scanner.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Component type of a scanned API: frontend, backend, mobile, or a combined
 * frontend/backend component. The value is the code persisted/transferred.
 */
@AllArgsConstructor
@Getter
public enum ComponentTypeEnum {
    /**
     * Frontend component.
     */
    FRONTEND("1"),
    /**
     * Backend component.
     */
    BACKEND("2"),
    /**
     * Mobile component.
     */
    MOBILE("3"),
    /**
     * Combined frontend and backend component.
     */
    FRONT_BACK_END("4");

    private String value;
}
pepoc/CustomViewGather | KuaiKuai/src/me/kkuai/kuailian/db/ChatMsg.java | package me.kkuai.kuailian.db;
/**
 * Column-name constants and DDL helper for the chat-message SQLite table.
 */
public class ChatMsg {
    public static String ID = "_id";
    public static String SENDER_UID = "senderUid";
    public static String FRIEND_UID = "friendUid";
    public static String SELF_UID = "selfUid";
    public static String M_ID = "m_id";
    public static String M_MSG_ID = "m_msgId";
    public static String CLIENT_UNIQUE_ID = "clientUniqueId";
    public static String MSG_CONTENT = "msgContent";
    public static String MSG_TYPE = "msgType";
    public static String MSG_STATUS = "msgStatus";
    public static String EXTRA_FLAG = "extraFlag";
    public static String RECEIVE_TIME = "receiveTime";
    public static String READ_TIME = "readTime";
    public static String SEND_TIME = "sendTime";

    /**
     * Builds the CREATE TABLE statement for the given table name.
     * NOTE(review): tableName is interpolated directly into the SQL — only
     * call this with trusted, internally-generated table names.
     */
    public static String createTable(String tableName) {
        String CREATE_TABLE = new StringBuilder().append("CREATE TABLE IF NOT EXISTS ").append(tableName)
                .append(" (")
                .append(ID).append(" INTEGER PRIMARY KEY AUTOINCREMENT,")
                .append(SENDER_UID).append(" text,")
                .append(FRIEND_UID).append(" text,")
                .append(SELF_UID).append(" text,")
                .append(M_ID).append(" text,")
                .append(M_MSG_ID).append(" text,")
                .append(CLIENT_UNIQUE_ID).append(" text,")
                .append(MSG_CONTENT).append(" text,")
                .append(MSG_TYPE).append(" text,")
                .append(MSG_STATUS).append(" text,")
                .append(RECEIVE_TIME).append(" long,")
                .append(SEND_TIME).append(" long,")
                .append(READ_TIME).append(" long,")
                .append(EXTRA_FLAG).append(" text)")
                .toString();
        return CREATE_TABLE;
    }
}
|
JsaParDeveloper/jsapar | src/test/java/org/jsapar/model/LocalTimeCellTest.java | package org.jsapar.model;
import org.junit.Test;
import java.time.LocalTime;
import static org.junit.Assert.*;
public class LocalTimeCellTest {
@Test
public void compareValueTo() {
LocalTimeCell c1 = new LocalTimeCell("test1", LocalTime.of(12,45));
LocalTimeCell c2 = new LocalTimeCell("test2", LocalTime.of(12,45));
LocalTimeCell c3 = new LocalTimeCell("test3", LocalTime.of(10,45));
assertEquals(0, c1.compareValueTo(c2));
assertTrue(c1.compareValueTo(c3) >0);
assertTrue(c3.compareValueTo(c1) <0);
}
@Test
public void emptyOf() {
Cell cell = LocalTimeCell.emptyOf("test");
assertEquals("test", cell.getName());
assertTrue(cell.isEmpty());
}
} |
jayvdb/bluebottle | bluebottle/activities/tests/test_api.py | <reponame>jayvdb/bluebottle
from builtins import str
import json
from datetime import timedelta
import dateutil
from django.contrib.auth.models import Group, Permission
from django.contrib.gis.geos import Point
from django.test import tag
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.timezone import now
from django_elasticsearch_dsl.test import ESTestCase
from rest_framework import status
from bluebottle.assignments.tests.factories import AssignmentFactory, ApplicantFactory
from bluebottle.files.tests.factories import ImageFactory
from bluebottle.events.tests.factories import EventFactory, ParticipantFactory
from bluebottle.funding.tests.factories import FundingFactory, DonationFactory
from bluebottle.initiatives.tests.factories import InitiativeFactory
from bluebottle.members.models import MemberPlatformSettings
from bluebottle.segments.tests.factories import SegmentFactory
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.factory_models.geo import LocationFactory, GeolocationFactory, PlaceFactory, CountryFactory
from bluebottle.test.factory_models.projects import ProjectThemeFactory
from bluebottle.test.factory_models.tasks import SkillFactory
from bluebottle.test.utils import BluebottleTestCase, JSONAPITestClient
@override_settings(
ELASTICSEARCH_DSL_AUTOSYNC=True,
ELASTICSEARCH_DSL_AUTO_REFRESH=True
)
@tag('elasticsearch')
class ActivityListSearchAPITestCase(ESTestCase, BluebottleTestCase):
def setUp(self):
    """Prepare a JSON:API client, the activity-list URL and an owner user."""
    super(ActivityListSearchAPITestCase, self).setUp()
    self.client = JSONAPITestClient()
    self.url = reverse('activity-list')
    self.owner = BlueBottleUserFactory.create()
def test_images(self):
    """Every activity type with an image exposes an 'images' relationship."""
    EventFactory.create(
        owner=self.owner, review_status='approved', image=ImageFactory.create()
    )
    AssignmentFactory.create(review_status='approved', image=ImageFactory.create())
    FundingFactory.create(review_status='approved', image=ImageFactory.create())

    response = self.client.get(self.url, user=self.owner)

    for activity in response.json()['data']:
        self.assertEqual(
            activity['relationships']['image']['data']['type'],
            'images'
        )
def test_no_filter(self):
    """Without filters only open/succeeded activities are listed (open first)."""
    succeeded = EventFactory.create(
        owner=self.owner, status='succeeded'
    )
    open = EventFactory.create(status='open')
    # Submitted and closed activities must not appear in the listing.
    EventFactory.create(status='submitted')
    EventFactory.create(status='closed')

    response = self.client.get(self.url, user=self.owner)
    data = json.loads(response.content)

    self.assertEqual(data['meta']['pagination']['count'], 2)
    self.assertEqual(data['data'][1]['id'], str(succeeded.pk))
    self.assertEqual(data['data'][0]['id'], str(open.pk))
    self.assertTrue('meta' in data['data'][0])
def test_anonymous(self):
    """Anonymous users see the same open/succeeded activities."""
    succeeded = EventFactory.create(
        owner=self.owner, status='succeeded'
    )
    open = EventFactory.create(status='open')
    EventFactory.create(status='submitted')
    EventFactory.create(status='closed')

    response = self.client.get(self.url)
    data = json.loads(response.content)

    self.assertEqual(data['meta']['pagination']['count'], 2)
    self.assertEqual(data['data'][1]['id'], str(succeeded.pk))
    self.assertEqual(data['data'][0]['id'], str(open.pk))
    self.assertTrue('meta' in data['data'][0])
def test_filter_owner(self):
    """filter[owner.id] limits results to activities owned by that user."""
    EventFactory.create(owner=self.owner, status='open')
    EventFactory.create(status='open')

    response = self.client.get(
        self.url + '?filter[owner.id]={}'.format(self.owner.pk),
        user=self.owner
    )
    data = json.loads(response.content)

    self.assertEqual(data['meta']['pagination']['count'], 1)
    self.assertEqual(data['data'][0]['relationships']['owner']['data']['id'], str(self.owner.pk))
def test_only_owner_permission(self):
    """With only the api_read_own_activity permission, users see just their own."""
    EventFactory.create(owner=self.owner, status='open')
    EventFactory.create(status='open')

    # Swap the global read permission for the owner-only variant.
    authenticated = Group.objects.get(name='Authenticated')
    authenticated.permissions.remove(
        Permission.objects.get(codename='api_read_activity')
    )
    authenticated.permissions.add(
        Permission.objects.get(codename='api_read_own_activity')
    )

    response = self.client.get(
        self.url,
        user=self.owner
    )
    data = json.loads(response.content)

    self.assertEqual(data['meta']['pagination']['count'], 1)
    self.assertEqual(data['data'][0]['relationships']['owner']['data']['id'], str(self.owner.pk))
def test_initiative_location(self):
    """filter[initiative_location.id] matches activities via their initiative's office."""
    location = LocationFactory.create()
    initiative = InitiativeFactory.create(status='open', location=location)
    activity = EventFactory.create(status='open', initiative=initiative)
    EventFactory.create(status='open')

    response = self.client.get(
        self.url + '?filter[initiative_location.id]={}'.format(location.pk),
        user=self.owner
    )
    data = json.loads(response.content)

    self.assertEqual(data['meta']['pagination']['count'], 1)
    self.assertEqual(data['data'][0]['id'], str(activity.pk))
def test_activity_date_filter(self):
    """filter[date] matches events by start date, assignments by (deadline or
    on-date) date, and fundings by deadline; deadline-style assignments match
    any date up to the deadline."""
    next_month = now() + dateutil.relativedelta.relativedelta(months=1)
    after = now() + dateutil.relativedelta.relativedelta(months=2)

    event = EventFactory.create(
        status='open',
        start=next_month
    )
    EventFactory.create(
        status='open',
        start=after
    )

    on_date_assignment = AssignmentFactory.create(
        status='open',
        date=next_month,
        end_date_type='on_date'
    )
    AssignmentFactory.create(
        status='open',
        date=after,
        end_date_type='on_date'
    )
    deadline_assignment = AssignmentFactory.create(
        status='open',
        date=next_month,
        end_date_type='deadline'
    )

    # Feature is not dealing with time. Disabling timezone check for test
    funding = FundingFactory.create(
        status='open',
        deadline=next_month
    )
    FundingFactory.create(
        status='open',
        deadline=after
    )

    # The later date only matches the three activities scheduled for it.
    response = self.client.get(
        self.url + '?filter[date]={}-{}-{}'.format(after.year, after.month, after.day),
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 3)

    # The earlier date matches the four next-month activities.
    response = self.client.get(
        self.url + '?filter[date]={}-{}-{}'.format(
            next_month.year, next_month.month, next_month.day
        ),
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 4)

    found = [item['id'] for item in data['data']]
    self.assertTrue(str(event.pk) in found)
    self.assertTrue(str(on_date_assignment.pk) in found)
    self.assertTrue(str(deadline_assignment.pk) in found)
    self.assertTrue(str(funding.pk) in found)
def test_filter_segment(self):
    """filter[segment.<type-slug>] returns only activities tagged with that segment."""
    segment = SegmentFactory.create()
    first = EventFactory.create(
        status='open',
    )
    first.segments.add(segment)
    EventFactory.create(
        status='open'
    )

    response = self.client.get(
        self.url + '?filter[segment.{}]={}'.format(
            segment.type.slug, segment.pk
        ),
        user=self.owner
    )
    data = json.loads(response.content)

    self.assertEqual(data['meta']['pagination']['count'], 1)
    self.assertEqual(data['data'][0]['id'], str(first.pk))
def test_filter_segment_mismatch(self):
    """A segment filter whose type slug and segment id disagree matches nothing."""
    first = EventFactory.create(
        status='open',
    )
    first_segment = SegmentFactory.create()
    first.segments.add(first_segment)
    second_segment = SegmentFactory.create()
    first.segments.add(second_segment)
    EventFactory.create(
        status='open'
    )

    # Query first_segment's type slug with second_segment's id.
    response = self.client.get(
        self.url + '?filter[segment.{}]={}'.format(
            first_segment.type.slug, second_segment.pk
        ),
        user=self.owner
    )
    data = json.loads(response.content)

    self.assertEqual(data['meta']['pagination']['count'], 0)
def test_search(self):
    """Full-text search matches activities on title and description."""
    first = EventFactory.create(
        title='Lorem ipsum dolor sit amet',
        description="Lorem ipsum",
        status='open'
    )
    second = EventFactory.create(title='Lorem ipsum dolor sit amet', status='open')
    response = self.client.get(
        self.url + '?filter[search]=lorem ipsum',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 2)
    # first matches on title AND description, so it ranks above second.
    self.assertEqual(data['data'][0]['id'], str(first.pk))
    self.assertEqual(data['data'][1]['id'], str(second.pk))
def test_search_different_type(self):
    """Search returns matches across polymorphic activity types (events and fundings)."""
    first = EventFactory.create(
        title='Lorem ipsum dolor sit amet',
        description="Lorem ipsum",
        status='open'
    )
    second = FundingFactory.create(title='Lorem ipsum dolor sit amet', status='open')
    response = self.client.get(
        self.url + '?filter[search]=lorem ipsum',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 2)
    # Each result keeps its own JSON:API resource type.
    self.assertEqual(data['data'][0]['id'], str(first.pk))
    self.assertEqual(data['data'][0]['type'], 'activities/events')
    self.assertEqual(data['data'][1]['id'], str(second.pk))
    self.assertEqual(data['data'][1]['type'], 'activities/fundings')
def test_search_boost(self):
    """A title match is ranked above a description-only match."""
    first = EventFactory.create(
        title='Something else',
        description='Lorem ipsum dolor sit amet',
        status='open'
    )
    second = EventFactory.create(
        title='Lorem ipsum dolor sit amet',
        description="Something else",
        status='open'
    )
    response = self.client.get(
        self.url + '?filter[search]=lorem ipsum',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 2)
    # second matches on title, so it outranks first's description match.
    self.assertEqual(data['data'][0]['id'], str(second.pk))
    self.assertEqual(data['data'][1]['id'], str(first.pk))
def test_search_formatted_address(self):
    """Search also matches the activity location's formatted address."""
    location = GeolocationFactory.create(formatted_address='Roggeveenstraat')
    first = EventFactory.create(
        location=location,
        status='open'
    )
    second = EventFactory.create(
        title='Roggeveenstraat',
        status='open'
    )
    # Unrelated event that must not match the search term.
    EventFactory.create(
        status='open'
    )
    response = self.client.get(
        self.url + '?filter[search]=Roggeveenstraat',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 2)
    # Title match ranks above address match.
    self.assertEqual(data['data'][0]['id'], str(second.pk))
    self.assertEqual(data['data'][1]['id'], str(first.pk))
def test_search_initiative_title(self):
    """Search also matches the title of the activity's initiative."""
    first = EventFactory.create(
        initiative=InitiativeFactory.create(title='Test title'),
        status='open'
    )
    second = EventFactory.create(
        title='Test title',
        status='open'
    )
    EventFactory.create(
        status='open'
    )
    response = self.client.get(
        self.url + '?filter[search]=test title',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 2)
    # Own-title match ranks above initiative-title match.
    self.assertEqual(data['data'][0]['id'], str(second.pk))
    self.assertEqual(data['data'][1]['id'], str(first.pk))
def test_search_segment_name(self):
    """Search matches the name of segments attached to an activity."""
    first = EventFactory.create(
        status='open',
    )
    first.segments.add(SegmentFactory(name='Online Marketing'))
    EventFactory.create(
        status='open'
    )
    response = self.client.get(
        self.url + '?filter[search]=marketing',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 1)
    self.assertEqual(data['data'][0]['id'], str(first.pk))
def test_sort_title(self):
    """sort=alphabetical orders activities by title, independent of creation order."""
    second = EventFactory.create(title='B: something else', status='open')
    first = EventFactory.create(title='A: something', status='open')
    third = EventFactory.create(title='C: More', status='open')
    response = self.client.get(
        self.url + '?sort=alphabetical',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 3)
    self.assertEqual(data['data'][0]['id'], str(first.pk))
    self.assertEqual(data['data'][1]['id'], str(second.pk))
    self.assertEqual(data['data'][2]['id'], str(third.pk))
def test_sort_activity_date(self):
    """sort=date orders events by start date, latest first."""
    first = EventFactory.create(
        status='open',
        start=now() + timedelta(days=10)
    )
    second = EventFactory.create(
        status='open',
        start=now() + timedelta(days=9)
    )
    third = EventFactory.create(
        status='open',
        start=now() + timedelta(days=11)
    )
    response = self.client.get(
        self.url + '?sort=date',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 3)
    # Descending by start: third (+11d), first (+10d), second (+9d).
    self.assertEqual(data['data'][0]['id'], str(third.pk))
    self.assertEqual(data['data'][1]['id'], str(first.pk))
    self.assertEqual(data['data'][2]['id'], str(second.pk))
def test_sort_matching_popularity(self):
    """sort=popularity ranks activities by number and recency of participants."""
    first = EventFactory.create(status='open')
    second = EventFactory.create(status='open')
    ParticipantFactory.create(
        activity=second, created=now() - timedelta(days=7)
    )
    third = EventFactory.create(status='open')
    ParticipantFactory.create(
        activity=third, created=now() - timedelta(days=5)
    )
    fourth = EventFactory.create(status='open')
    ParticipantFactory.create(
        activity=fourth, created=now() - timedelta(days=7)
    )
    ParticipantFactory.create(
        activity=fourth, created=now() - timedelta(days=5)
    )
    response = self.client.get(
        self.url + '?sort=popularity',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 4)
    # More participants beat fewer; a more recent participant beats an older one.
    self.assertEqual(data['data'][0]['id'], str(fourth.pk))
    self.assertEqual(data['data'][1]['id'], str(third.pk))
    self.assertEqual(data['data'][2]['id'], str(second.pk))
    self.assertEqual(data['data'][3]['id'], str(first.pk))
def test_sort_matching_status(self):
    """sort=popularity ranks by status: open beats running beats full (open at
    capacity) beats succeeded; the closed event is excluded (count is 4 of 5)."""
    EventFactory.create(status='closed')
    second = EventFactory.create(status='succeeded')
    ParticipantFactory.create(activity=second)
    # Open but at capacity once it has one participant.
    third = EventFactory.create(
        status='open',
        capacity=1
    )
    ParticipantFactory.create(activity=third)
    fourth = EventFactory.create(status='running')
    ParticipantFactory.create(activity=fourth)
    fifth = EventFactory.create(status='open')
    ParticipantFactory.create(activity=fifth)
    response = self.client.get(
        self.url + '?sort=popularity',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 4)
    self.assertEqual(data['data'][0]['id'], str(fifth.pk))
    self.assertEqual(data['data'][1]['id'], str(fourth.pk))
    self.assertEqual(data['data'][2]['id'], str(third.pk))
    self.assertEqual(data['data'][3]['id'], str(second.pk))
def test_sort_matching_skill(self):
    """sort=popularity boosts assignments matching the user's skills within each
    status group; open assignments still rank above full ones."""
    skill = SkillFactory.create()
    self.owner.skills.add(skill)
    self.owner.save()
    first = AssignmentFactory.create(status='full')
    ApplicantFactory.create_batch(3, activity=first, status='accepted')
    second = AssignmentFactory.create(status='full', expertise=skill)
    ApplicantFactory.create_batch(3, activity=second, status='accepted')
    third = AssignmentFactory.create(status='open')
    fourth = AssignmentFactory.create(status='open', expertise=skill)
    response = self.client.get(
        self.url + '?sort=popularity',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 4)
    self.assertEqual(data['data'][0]['id'], str(fourth.pk))
    self.assertEqual(data['data'][1]['id'], str(third.pk))
    self.assertEqual(data['data'][2]['id'], str(second.pk))
    self.assertEqual(data['data'][3]['id'], str(first.pk))
def test_sort_matching_theme(self):
    """sort=popularity boosts activities whose initiative theme is one of the
    user's favourite themes, within each status group."""
    theme = ProjectThemeFactory.create()
    self.owner.favourite_themes.add(theme)
    self.owner.save()
    initiative = InitiativeFactory.create(theme=theme)
    # capacity=1 + one participant makes first and second effectively full.
    first = EventFactory.create(status='open', capacity=1)
    ParticipantFactory.create(activity=first)
    second = EventFactory.create(
        status='open',
        initiative=initiative,
        capacity=1
    )
    ParticipantFactory.create(activity=second)
    third = EventFactory.create(status='open')
    ParticipantFactory.create(activity=third)
    fourth = EventFactory.create(status='open', initiative=initiative)
    ParticipantFactory.create(activity=fourth)
    response = self.client.get(
        self.url + '?sort=popularity',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 4)
    # Open+theme, open, full+theme, full.
    self.assertEqual(data['data'][0]['id'], str(fourth.pk))
    self.assertEqual(data['data'][1]['id'], str(third.pk))
    self.assertEqual(data['data'][2]['id'], str(second.pk))
    self.assertEqual(data['data'][3]['id'], str(first.pk))
def test_sort_matching_location(self):
    """sort=popularity boosts non-online assignments located near the user's
    place; closer locations rank higher within a status group."""
    # NOTE(review): the Place position string appears to be 'lat, lon' while
    # Point takes (lon, lat) — Point(20.0, 10) matches '10.0, 20.0'; confirm.
    PlaceFactory.create(content_object=self.owner, position='10.0, 20.0')
    first = AssignmentFactory.create(status='full')
    ApplicantFactory.create_batch(3, activity=first, status='accepted')
    second = AssignmentFactory.create(
        status='full',
        is_online=False,
        location=GeolocationFactory.create(position=Point(20.0, 10))
    )
    ApplicantFactory.create_batch(3, activity=second, status='accepted')
    third = AssignmentFactory.create(
        status='open',
        is_online=False,
    )
    fourth = AssignmentFactory.create(
        status='open',
        is_online=False,
        location=GeolocationFactory.create(position=Point(21.0, 9.0))
    )
    fifth = AssignmentFactory.create(
        is_online=False,
        status='open', location=GeolocationFactory.create(position=Point(20.0, 10.0))
    )
    response = self.client.get(
        self.url + '?sort=popularity',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 5)
    # Open: exact-position match, nearby, no location; then the full ones.
    self.assertEqual(data['data'][0]['id'], str(fifth.pk))
    self.assertEqual(data['data'][1]['id'], str(fourth.pk))
    self.assertEqual(data['data'][2]['id'], str(third.pk))
    self.assertEqual(data['data'][3]['id'], str(second.pk))
    self.assertEqual(data['data'][4]['id'], str(first.pk))
def test_filter_country(self):
    """filter[country] keeps only activities whose initiative place is in that
    country; popularity ordering still applies within the filtered set."""
    country1 = CountryFactory.create()
    country2 = CountryFactory.create()
    initiative1 = InitiativeFactory.create(place=GeolocationFactory.create(country=country1))
    initiative2 = InitiativeFactory.create(place=GeolocationFactory.create(country=country2))
    initiative3 = InitiativeFactory.create(place=GeolocationFactory.create(country=country1))
    initiative4 = InitiativeFactory.create(place=GeolocationFactory.create(country=country2))
    first = AssignmentFactory.create(status='full', initiative=initiative1)
    ApplicantFactory.create_batch(3, activity=first, status='accepted')
    second = AssignmentFactory.create(status='open', initiative=initiative3)
    third = AssignmentFactory.create(status='full', initiative=initiative2)
    ApplicantFactory.create_batch(3, activity=third, status='accepted')
    AssignmentFactory.create(status='open', initiative=initiative4)
    response = self.client.get(
        self.url + '?sort=popularity&filter[country]={}'.format(country1.id),
        user=self.owner
    )
    data = json.loads(response.content)
    # Only the two country1 assignments remain; open ranks above full.
    self.assertEqual(data['meta']['pagination']['count'], 2)
    self.assertEqual(data['data'][0]['id'], str(second.pk))
    self.assertEqual(data['data'][1]['id'], str(first.pk))
def test_sort_matching_office_location(self):
    """sort=popularity also boosts assignments near the user's office location
    (``owner.location``), mirroring the personal-place test above."""
    self.owner.location = LocationFactory.create(position='10.0, 20.0')
    self.owner.save()
    first = AssignmentFactory.create(status='full')
    ApplicantFactory.create_batch(3, activity=first, status='accepted')
    second = AssignmentFactory.create(
        status='full',
        is_online=False,
        location=GeolocationFactory.create(position=Point(20.0, 10.0))
    )
    ApplicantFactory.create_batch(3, activity=second, status='accepted')
    third = AssignmentFactory.create(status='open')
    fourth = AssignmentFactory.create(
        status='open',
        is_online=False,
        location=GeolocationFactory.create(position=Point(21.0, 9.0))
    )
    fifth = AssignmentFactory.create(
        status='open',
        is_online=False,
        location=GeolocationFactory.create(position=Point(20.0, 10.0))
    )
    response = self.client.get(
        self.url + '?sort=popularity',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 5)
    self.assertEqual(data['data'][0]['id'], str(fifth.pk))
    self.assertEqual(data['data'][1]['id'], str(fourth.pk))
    self.assertEqual(data['data'][2]['id'], str(third.pk))
    self.assertEqual(data['data'][3]['id'], str(second.pk))
    self.assertEqual(data['data'][4]['id'], str(first.pk))
def test_sort_matching_created(self):
    """With identical status and no participants, sort=popularity falls back to
    creation date, newest first."""
    first = EventFactory.create(
        status='open', created=now() - timedelta(days=7)
    )
    second = EventFactory.create(
        status='open', created=now() - timedelta(days=5)
    )
    third = EventFactory.create(status='open', created=now() - timedelta(days=1))
    response = self.client.get(
        self.url + '?sort=popularity',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 3)
    self.assertEqual(data['data'][0]['id'], str(third.pk))
    self.assertEqual(data['data'][1]['id'], str(second.pk))
    self.assertEqual(data['data'][2]['id'], str(first.pk))
def test_sort_matching_combined(self):
    """Popularity boosts stack: theme, theme+location, theme+location+skill
    rank in increasing order."""
    theme = ProjectThemeFactory.create()
    self.owner.favourite_themes.add(theme)
    skill = SkillFactory.create()
    self.owner.skills.add(skill)
    self.owner.location = LocationFactory.create(position='10.0, 20.0')
    self.owner.save()
    initiative = InitiativeFactory.create(theme=theme)
    # first: theme match only (events have no expertise/location boost here).
    first = EventFactory.create(
        status='open',
        initiative=initiative,
        is_online=False
    )
    # second: theme + location match.
    second = AssignmentFactory.create(
        status='open',
        location=GeolocationFactory.create(position=Point(21.0, 9.0)),
        initiative=initiative,
        is_online=False
    )
    # third: theme + location + skill match — highest rank.
    third = AssignmentFactory.create(
        status='open',
        location=GeolocationFactory.create(position=Point(21.0, 9.0)),
        initiative=initiative,
        expertise=skill,
        is_online=False
    )
    response = self.client.get(
        self.url + '?sort=popularity',
        user=self.owner
    )
    data = json.loads(response.content)
    self.assertEqual(data['meta']['pagination']['count'], 3)
    self.assertEqual(data['data'][0]['id'], str(third.pk))
    self.assertEqual(data['data'][1]['id'], str(second.pk))
    self.assertEqual(data['data'][2]['id'], str(first.pk))
def test_limits(self):
    """page[size] is honoured; an oversized page size simply returns all results."""
    initiative = InitiativeFactory.create()
    EventFactory.create_batch(
        7,
        status='open',
        initiative=initiative,
    )
    # Requesting more than exist returns everything (7).
    response = self.client.get(
        self.url + '?page[size]=150',
        user=self.owner
    )
    self.assertEqual(len(response.json()['data']), 7)
    # Requesting fewer caps the page at that size (3).
    response = self.client.get(
        self.url + '?page[size]=3',
        user=self.owner
    )
    self.assertEqual(len(response.json()['data']), 3)
class ActivityRelatedImageAPITestCase(BluebottleTestCase):
    """Tests for linking a previously uploaded image to an activity via the
    related-activity-image endpoint."""

    def setUp(self):
        super(ActivityRelatedImageAPITestCase, self).setUp()
        self.client = JSONAPITestClient()
        self.owner = BlueBottleUserFactory.create()
        self.funding = FundingFactory.create(
            owner=self.owner,
        )
        self.related_image_url = reverse('related-activity-image-list')
        file_path = './bluebottle/files/tests/files/test-image.png'
        # Upload a test image up front; the returned id is referenced by the
        # relationship payloads in the tests below.
        with open(file_path, 'rb') as test_file:
            response = self.client.post(
                reverse('image-list'),
                test_file.read(),
                content_type="image/png",
                format=None,
                HTTP_CONTENT_DISPOSITION='attachment; filename="some_file.jpg"',
                user=self.owner
            )
        self.file_data = json.loads(response.content)

    def test_create(self):
        """The activity owner can attach the uploaded image to their funding."""
        data = {
            'data': {
                'type': 'related-activity-images',
                'relationships': {
                    'image': {
                        'data': {
                            'type': 'images',
                            'id': self.file_data['data']['id']
                        }
                    },
                    'resource': {
                        'data': {
                            'type': 'activities/fundings',
                            'id': self.funding.pk,
                        }
                    }
                }
            }
        }
        response = self.client.post(
            self.related_image_url,
            data=json.dumps(data),
            user=self.owner
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # The included image resource exposes a resized ('large', 600px) link
        # scoped to the newly created related-image id.
        self.assertEqual(
            response.json()['included'][1]['attributes']['links']['large'].split('?')[0],
            u'/api/activities/{}/related-image/600'.format(response.json()['data']['id'])
        )

    def test_create_non_owner(self):
        """A user who does not own the activity gets 403 when attaching an image."""
        data = {
            'data': {
                'type': 'related-activity-images',
                'relationships': {
                    'image': {
                        'data': {
                            'type': 'images',
                            'id': self.file_data['data']['id']
                        }
                    },
                    'resource': {
                        'data': {
                            'type': 'activities/fundings',
                            'id': self.funding.pk,
                        }
                    }
                }
            }
        }
        response = self.client.post(
            self.related_image_url,
            data=json.dumps(data),
            user=BlueBottleUserFactory.create()
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class ContributionListAPITestCase(BluebottleTestCase):
    """Tests for the contribution list endpoint, which aggregates the current
    user's participations (events), applications (assignments) and donations."""

    def setUp(self):
        super(ContributionListAPITestCase, self).setUp()
        self.client = JSONAPITestClient()
        self.user = BlueBottleUserFactory.create()
        ParticipantFactory.create_batch(2, user=self.user)
        ApplicantFactory.create_batch(2, user=self.user)
        DonationFactory.create_batch(2, user=self.user, status='succeeded')
        # 'new' donations are created but must not be listed (count stays 6).
        DonationFactory.create_batch(2, user=self.user, status='new')
        # Contributions by other users must never show up for self.user.
        ParticipantFactory.create()
        ApplicantFactory.create()
        DonationFactory.create()
        self.url = reverse('contribution-list')

    def test_get(self):
        """An authenticated user sees exactly their own listed contributions,
        with the related activity included per type."""
        response = self.client.get(
            self.url,
            user=self.user
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.json()
        # 2 participants + 2 applicants + 2 succeeded donations.
        self.assertEqual(len(data['data']), 6)
        for contribution in data['data']:
            self.assertTrue(
                contribution['type'] in (
                    'contributions/applicants',
                    'contributions/participants',
                    'contributions/donations'
                )
            )
            self.assertTrue(
                contribution['relationships']['activity']['data']['type'] in (
                    'activities/fundings',
                    'activities/events',
                    'activities/assignments'
                )
            )
        for i in data['included']:
            if i['type'] == 'activities/events':
                self.assertTrue('start' in i['attributes'])
                self.assertTrue('duration' in i['attributes'])
                self.assertTrue('slug' in i['attributes'])
                self.assertTrue('title' in i['attributes'])
            if i['type'] == 'activities/assignments':
                self.assertTrue('date' in i['attributes'])
                self.assertTrue('end-date-type' in i['attributes'])
                self.assertTrue('slug' in i['attributes'])
                self.assertTrue('title' in i['attributes'])
            # BUGFIX: this previously compared against 'activities/funding'
            # (singular), which never matches the serialized type
            # 'activities/fundings' used everywhere else in this file, so the
            # funding assertions were dead code.
            if i['type'] == 'activities/fundings':
                self.assertTrue('slug' in i['attributes'])
                self.assertTrue('title' in i['attributes'])

    def test_get_anonymous(self):
        """Anonymous requests are rejected with 401."""
        response = self.client.get(
            self.url
        )
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_get_other_user(self):
        """Another authenticated user gets an empty list — contributions are
        visible only to their owner."""
        response = self.client.get(
            self.url,
            user=BlueBottleUserFactory.create()
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.json()
        self.assertEqual(len(data['data']), 0)
@override_settings(
    ELASTICSEARCH_DSL_AUTOSYNC=True,
    ELASTICSEARCH_DSL_AUTO_REFRESH=True
)
@tag('elasticsearch')
class ActivityAPIAnonymizationTestCase(ESTestCase, BluebottleTestCase):
    """Tests that members attached to sufficiently old data are anonymized in
    the event detail API once an anonymization age is configured."""

    def setUp(self):
        super(ActivityAPIAnonymizationTestCase, self).setUp()
        self.member_settings = MemberPlatformSettings.load()
        self.client = JSONAPITestClient()
        self.owner = BlueBottleUserFactory.create()
        last_year = now() - timedelta(days=400)
        # Old event with one old and one recent participant.
        self.old_event = EventFactory.create(
            created=last_year,
            status='open'
        )
        ParticipantFactory.create(
            activity=self.old_event,
            created=last_year
        )
        ParticipantFactory.create(
            activity=self.old_event
        )
        # Recent event, again with one old and one recent participant.
        self.new_event = EventFactory.create(
            status='open'
        )
        ParticipantFactory.create(
            activity=self.new_event,
            created=last_year
        )
        ParticipantFactory.create(
            activity=self.new_event
        )
        self.new_url = reverse('event-detail', args=(self.new_event.id,))
        self.old_url = reverse('event-detail', args=(self.old_event.id,))

    def _get_members(self, data):
        # Included member resources that still expose a first name (not anonymized).
        return [item for item in data['included'] if item['type'] == 'members' and item['attributes']['first-name']]

    def _get_anonymous(self, data):
        # Included member resources flagged as anonymous.
        return [item for item in data['included'] if item['type'] == 'members' and item['attributes']['is-anonymous']]

    def test_no_max_age(self):
        """Without an anonymization age configured, no member is anonymized,
        regardless of how old the event or participation is."""
        response = self.client.get(self.old_url, user=self.owner)
        data = json.loads(response.content)
        members = self._get_members(data)
        anonymous = self._get_anonymous(data)
        self.assertEqual(len(members), 3)
        self.assertEqual(len(anonymous), 0)
        response = self.client.get(self.new_url, user=self.owner)
        data = json.loads(response.content)
        members = self._get_members(data)
        anonymous = self._get_anonymous(data)
        self.assertEqual(len(members), 3)
        self.assertEqual(len(anonymous), 0)

    def test_max_age(self):
        """With anonymization_age=300 days, members tied to data older than 300
        days are anonymized: 2 of 3 for the old event, 1 of 3 for the new one."""
        self.member_settings.anonymization_age = 300
        self.member_settings.save()
        response = self.client.get(self.old_url, user=self.owner)
        data = json.loads(response.content)
        members = self._get_members(data)
        anonymous = self._get_anonymous(data)
        self.assertEqual(len(members), 1)
        self.assertEqual(len(anonymous), 2)
        response = self.client.get(self.new_url, user=self.owner)
        data = json.loads(response.content)
        members = self._get_members(data)
        anonymous = self._get_anonymous(data)
        self.assertEqual(len(members), 2)
        self.assertEqual(len(anonymous), 1)
|
XenoXilus/dl | adv/summer_patia.py | from core.advbase import *
def module():
    # Returns the adventurer class defined in this file (looked up by the
    # simulation framework rather than called directly).
    return Summer_Patia
class Summer_Patia(Adv):
    """Simulation configuration for the Summer Patia adventurer."""
    comment = 'cannot build combo for Cat Sith; uses up 15 stacks by 46.94s'
    conf = {}
    # Wyrmprint slots; the poison scenario reuses the same prints.
    conf['slots.a'] = ['Kung_Fu_Masters', 'The_Plaguebringer']
    conf['slots.poison.a'] = conf['slots.a']
    # Default dragon; d_slots() may override this (see below).
    conf['slots.d'] = 'Shinobi'
    # Action priority list: backtick lines are rules evaluated top-down,
    # '#' lines inside the string are disabled alternatives.
    conf['acl'] = """
# use dragon if using Cat Sith
# `dragon(c3-s-end), fsc
`s3, not buff(s3)
`s1, fsc
`s2, fsc
`s4, fsc
`dodge, fsc
`fs3
"""
    # Co-abilities and the one shared from this unit.
    conf['coabs'] = ['Summer_Patia', 'Blade', 'Wand', 'Curran']
    conf['share'] = ['Curran']

    def d_slots(self):
        # For runs of 120s or shorter, switch the dragon slot to Gala Cat Sith.
        if self.duration <= 120:
            self.conf['slots.d'] = 'Gala_Cat_Sith'
if __name__ == '__main__':
    # `sys` was previously only in scope via the `core.advbase` star import;
    # import it explicitly so this entry point does not depend on that detail.
    import sys
    from core.simulate import test_with_argv
    test_with_argv(None, *sys.argv)
lechium/tvOS124Headers | System/Library/PrivateFrameworks/HomeKitDaemon.framework/HMDCloudCache.h | <gh_stars>1-10
/*
* This header is generated by classdump-dyld 1.0
* on Saturday, August 24, 2019 at 9:49:02 PM Mountain Standard Time
* Operating System: Version 12.4 (Build 16M568)
* Image Source: /System/Library/PrivateFrameworks/HomeKitDaemon.framework/HomeKitDaemon
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/
#import <HMFoundation/HMFObject.h>
@protocol OS_dispatch_queue;
@class HMDBackingStore, HMDCloudLegacyZone, HMDCloudMetadataZone, HMDCloudHomeManagerZone, NSMutableDictionary, NSObject;
// Generated (classdump) declaration: daemon-side cache that groups the
// HomeKit CloudKit zones — legacy, metadata, home-manager and per-home zones —
// on top of an HMDBackingStore, with work dispatched on a serial queue.
// Signatures only; implementations live in the HomeKitDaemon binary.
@interface HMDCloudCache : HMFObject {

	HMDBackingStore* _backingStore;
	HMDCloudLegacyZone* _legacyZone;
	HMDCloudMetadataZone* _metadataZone;
	HMDCloudHomeManagerZone* _homeManagerZone;
	NSMutableDictionary* _homeZones;
	NSObject*<OS_dispatch_queue> _workQueue;

}

// Mutable map of home zones, keyed per home (key type not visible here).
@property (nonatomic,retain) NSMutableDictionary * homeZones;                              //@synthesize homeZones=_homeZones - In the implementation block
@property (nonatomic,retain) NSObject*<OS_dispatch_queue> workQueue;                       //@synthesize workQueue=_workQueue - In the implementation block
// Weak back-reference: the backing store owns the cache, not vice versa.
@property (assign,nonatomic,__weak) HMDBackingStore * backingStore;                        //@synthesize backingStore=_backingStore - In the implementation block
@property (nonatomic,readonly) HMDCloudLegacyZone * legacyZone;                            //@synthesize legacyZone=_legacyZone - In the implementation block
@property (nonatomic,readonly) HMDCloudMetadataZone * metadataZone;                        //@synthesize metadataZone=_metadataZone - In the implementation block
@property (nonatomic,readonly) HMDCloudHomeManagerZone * homeManagerZone;                  //@synthesize homeManagerZone=_homeManagerZone - In the implementation block
-(NSObject*<OS_dispatch_queue>)workQueue;
-(void)setWorkQueue:(NSObject*<OS_dispatch_queue>)arg1 ;
-(HMDBackingStore *)backingStore;
-(void)setBackingStore:(HMDBackingStore *)arg1 ;
-(HMDCloudLegacyZone *)legacyZone;
-(HMDCloudMetadataZone *)metadataZone;
-(HMDCloudHomeManagerZone *)homeManagerZone;
// Zone lookup/creation is asynchronous — results are delivered via the
// completion blocks (block signatures are not recoverable from the dump).
-(void)homeZoneWithName:(id)arg1 owner:(id)arg2 completion:(/*^block*/id)arg3 ;
-(void)deleteHomeZoneWithName:(id)arg1 ;
-(id)allHomeZones;
-(void)deleteAllZones;
-(id)initWithBackingStore:(id)arg1 workQueue:(id)arg2 ;
-(void)createAndFetchZonesFromBackingStore:(/*^block*/id)arg1 ;
-(BOOL)homeZoneExists:(id)arg1 ;
-(id)_findStoreCacheZone:(id)arg1 byName:(id)arg2 ;
-(void)_createZones:(id)arg1 completion:(/*^block*/id)arg2 ;
-(NSMutableDictionary *)homeZones;
-(void)setHomeZones:(NSMutableDictionary *)arg1 ;
-(id)init;
@end
|
malharlakdawala/DevelopersInstitute | Week3/Day1/Exc3/scripts.js | <reponame>malharlakdawala/DevelopersInstitute
// Exercise: DOM retrieval — first with the classic getElementById /
// getElementsByTagName APIs, then with querySelector / querySelectorAll.

// Grab the container div by its id.
const containerDiv = document.getElementById("container");

// Collect every <h1> inside it (two equivalent ways).
const headings = containerDiv.getElementsByTagName("h1");
const headingsAgain = document.getElementById("container").getElementsByTagName("h1");

// Same container, fetched via a CSS selector this time.
const containerViaSelector = document.querySelector("#container");
console.log(containerViaSelector);

// querySelector yields only the first match...
const firstHeading = document.querySelector("#container > h1");
console.log(firstHeading);

// ...while querySelectorAll yields them all; log the second one.
const everyHeading = document.querySelectorAll("#container > h1");
console.log(everyHeading[1]);
ziransun/wpt | WebCryptoAPI/encrypt_decrypt/rsa.js | <filename>WebCryptoAPI/encrypt_decrypt/rsa.js
function run_test() {
var subtle = self.crypto.subtle; // Change to test prefixed implementations
// When are all these tests really done? When all the promises they use have resolved.
var all_promises = [];
// Source file rsa_vectors.js provides the getTestVectors method
// for the RSA-OAEP algorithm that drives these tests.
var vectors = getTestVectors();
var passingVectors = vectors.passing;
var failingVectors = vectors.failing;
// Test decryption, first, because encryption tests rely on that working
passingVectors.forEach(function(vector) {
var promise = importVectorKeys(vector, ["encrypt"], ["decrypt"])
.then(function(vectors) {
// Get a one byte longer plaintext to encrypt
if (!("ciphertext" in vector)) {
return;
}
promise_test(function(test) {
return subtle.decrypt(vector.algorithm, vector.privateKey, vector.ciphertext)
.then(function(plaintext) {
assert_true(equalBuffers(plaintext, vector.plaintext, "Decryption works"));
}, function(err) {
assert_unreached("Decryption should not throw error " + vector.name + ": " + err.message + "'");
});
}, vector.name + " decryption");
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name + " decryption");
});
all_promises.push(promise);
});
// Test decryption with an altered buffer
passingVectors.forEach(function(vector) {
var promise = importVectorKeys(vector, ["encrypt"], ["decrypt"])
.then(function(vectors) {
// Get a one byte longer plaintext to encrypt
if (!("ciphertext" in vector)) {
return;
}
promise_test(function(test) {
var ciphertext = copyBuffer(vector.ciphertext);
var operation = subtle.decrypt(vector.algorithm, vector.privateKey, ciphertext)
.then(function(plaintext) {
assert_true(equalBuffers(plaintext, vector.plaintext, "Decryption works"));
}, function(err) {
assert_unreached("Decryption should not throw error " + vector.name + ": " + err.message + "'");
});
ciphertext[0] = 255 - ciphertext[0];
return operation;
}, vector.name + " decryption with altered ciphertext");
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name + " decryption with altered ciphertext");
});
all_promises.push(promise);
});
// Check for failures due to using publicKey to decrypt.
passingVectors.forEach(function(vector) {
var promise = importVectorKeys(vector, ["encrypt"], ["decrypt"])
.then(function(vectors) {
promise_test(function(test) {
return subtle.decrypt(vector.algorithm, vector.publicKey, vector.ciphertext)
.then(function(plaintext) {
assert_unreached("Should have thrown error for using publicKey to decrypt in " + vector.name + ": " + err.message + "'");
}, function(err) {
assert_equals(err.name, "InvalidAccessError", "Should throw InvalidAccessError instead of " + err.message);
});
}, vector.name + " using publicKey to decrypt");
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name + " using publicKey to decrypt");
});
all_promises.push(promise);
});
// Check for failures due to no "decrypt" usage.
passingVectors.forEach(function(originalVector) {
var vector = Object.assign({}, originalVector);
var promise = importVectorKeys(vector, ["encrypt"], ["unwrapKey"])
.then(function(vectors) {
// Get a one byte longer plaintext to encrypt
promise_test(function(test) {
return subtle.decrypt(vector.algorithm, vector.publicKey, vector.ciphertext)
.then(function(plaintext) {
assert_unreached("Should have thrown error for no decrypt usage in " + vector.name + ": " + err.message + "'");
}, function(err) {
assert_equals(err.name, "InvalidAccessError", "Should throw InvalidAccessError instead of " + err.message);
});
}, vector.name + " no decrypt usage");
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name + " no decrypt usage");
});
all_promises.push(promise);
});
// Check for successful encryption even if plaintext is altered after call.
passingVectors.forEach(function(vector) {
var promise = importVectorKeys(vector, ["encrypt"], ["decrypt"])
.then(function(vectors) {
promise_test(function(test) {
var plaintext = copyBuffer(vector.plaintext);
var operation = subtle.encrypt(vector.algorithm, vector.publicKey, plaintext)
.then(function(ciphertext) {
assert_equals(ciphertext.byteLength * 8, vector.privateKey.algorithm.modulusLength, "Ciphertext length matches modulus length");
// Can we get the original plaintext back via decrypt?
return subtle.decrypt(vector.algorithm, vector.privateKey, ciphertext)
.then(function(result) {
assert_true(equalBuffers(result, vector.plaintext), "Round trip returns original plaintext");
return ciphertext;
}, function(err) {
assert_unreached("decrypt error for test " + vector.name + ": " + err.message + "'");
});
})
.then(function(priorCiphertext) {
// Will a second encrypt give us different ciphertext, as it should?
return subtle.encrypt(vector.algorithm, vector.publicKey, vector.plaintext)
.then(function(ciphertext) {
assert_false(equalBuffers(priorCiphertext, ciphertext), "Two encrypts give different results")
}, function(err) {
assert_unreached("second time encrypt error for test " + vector.name + ": '" + err.message + "'");
});
}, function(err) {
assert_unreached("decrypt error for test " + vector.name + ": '" + err.message + "'");
});
plaintext[0] = 255 - plaintext[0];
return operation;
}, vector.name + " with altered plaintext");
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name + " with altered plaintext");
});
all_promises.push(promise);
});
// Check for successful encryption.
passingVectors.forEach(function(vector) {
var promise = importVectorKeys(vector, ["encrypt"], ["decrypt"])
.then(function(vectors) {
promise_test(function(test) {
return subtle.encrypt(vector.algorithm, vector.publicKey, vector.plaintext)
.then(function(ciphertext) {
assert_equals(ciphertext.byteLength * 8, vector.privateKey.algorithm.modulusLength, "Ciphertext length matches modulus length");
// Can we get the original plaintext back via decrypt?
return subtle.decrypt(vector.algorithm, vector.privateKey, ciphertext)
.then(function(result) {
assert_true(equalBuffers(result, vector.plaintext), "Round trip returns original plaintext");
return ciphertext;
}, function(err) {
assert_unreached("decrypt error for test " + vector.name + ": " + err.message + "'");
});
})
.then(function(priorCiphertext) {
// Will a second encrypt give us different ciphertext, as it should?
return subtle.encrypt(vector.algorithm, vector.publicKey, vector.plaintext)
.then(function(ciphertext) {
assert_false(equalBuffers(priorCiphertext, ciphertext), "Two encrypts give different results")
}, function(err) {
assert_unreached("second time encrypt error for test " + vector.name + ": '" + err.message + "'");
});
}, function(err) {
assert_unreached("decrypt error for test " + vector.name + ": '" + err.message + "'");
});
}, vector.name);
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name);
});
all_promises.push(promise);
});
// Check for failures due to too long plaintext.
passingVectors.forEach(function(vector) {
var promise = importVectorKeys(vector, ["encrypt"], ["decrypt"])
.then(function(vectors) {
// Get a one byte longer plaintext to encrypt
var plaintext = new Uint8Array(vector.plaintext.byteLength + 1);
plaintext.set(plaintext, 0);
plaintext.set(new Uint8Array([32]), vector.plaintext.byteLength);
promise_test(function(test) {
return subtle.encrypt(vector.algorithm, vector.publicKey, plaintext)
.then(function(ciphertext) {
assert_unreached("Should have thrown error for too long plaintext in " + vector.name + ": " + err.message + "'");
}, function(err) {
assert_equals(err.name, "OperationError", "Should throw OperationError instead of " + err.message);
});
}, vector.name + " too long plaintext");
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name + " too long plaintext");
});
all_promises.push(promise);
});
// Check for failures due to using privateKey to encrypt.
passingVectors.forEach(function(vector) {
var promise = importVectorKeys(vector, ["encrypt"], ["decrypt"])
.then(function(vectors) {
promise_test(function(test) {
return subtle.encrypt(vector.algorithm, vector.privateKey, vector.plaintext)
.then(function(ciphertext) {
assert_unreached("Should have thrown error for using privateKey to encrypt in " + vector.name + ": " + err.message + "'");
}, function(err) {
assert_equals(err.name, "InvalidAccessError", "Should throw InvalidAccessError instead of " + err.message);
});
}, vector.name + " using privateKey to encrypt");
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name + " using privateKey to encrypt");
});
all_promises.push(promise);
});
// Check for failures due to no "encrypt usage".
passingVectors.forEach(function(originalVector) {
var vector = Object.assign({}, originalVector);
var promise = importVectorKeys(vector, [], ["decrypt"])
.then(function(vectors) {
// Get a one byte longer plaintext to encrypt
promise_test(function(test) {
return subtle.encrypt(vector.algorithm, vector.publicKey, vector.plaintext)
.then(function(ciphertext) {
assert_unreached("Should have thrown error for no encrypt usage in " + vector.name + ": " + err.message + "'");
}, function(err) {
assert_equals(err.name, "InvalidAccessError", "Should throw InvalidAccessError instead of " + err.message);
});
}, vector.name + " no encrypt usage");
}, function(err) {
// We need a failed test if the importVectorKey operation fails, so
// we know we never tested encryption
promise_test(function(test) {
assert_unreached("importVectorKeys failed for " + vector.name + ". Message: ''" + err.message + "''");
}, "importVectorKeys step: " + vector.name + " no encrypt usage");
});
all_promises.push(promise);
});
Promise.all(all_promises)
.then(function() {done();})
.catch(function() {done();})
// A test vector has all needed fields for encryption, EXCEPT that the
// key field may be null. This function replaces that null with the Correct
// CryptoKey object.
//
// Returns a Promise that yields an updated vector on success.
function importVectorKeys(vector, publicKeyUsages, privateKeyUsages) {
var publicPromise, privatePromise;
if (vector.publicKey !== null) {
publicPromise = new Promise(function(resolve, reject) {
resolve(vector);
});
} else {
publicPromise = subtle.importKey(vector.publicKeyFormat, vector.publicKeyBuffer, {name: vector.algorithm.name, hash: vector.hash}, false, publicKeyUsages)
.then(function(key) {
vector.publicKey = key;
return vector;
}); // Returns a copy of the sourceBuffer it is sent.
function copyBuffer(sourceBuffer) {
var source = new Uint8Array(sourceBuffer);
var copy = new Uint8Array(sourceBuffer.byteLength)
for (var i=0; i<source.byteLength; i++) {
copy[i] = source[i];
}
return copy;
}
}
if (vector.privateKey !== null) {
privatePromise = new Promise(function(resolve, reject) {
resolve(vector);
});
} else {
privatePromise = subtle.importKey(vector.privateKeyFormat, vector.privateKeyBuffer, {name: vector.algorithm.name, hash: vector.hash}, false, privateKeyUsages)
.then(function(key) {
vector.privateKey = key;
return vector;
});
}
return Promise.all([publicPromise, privatePromise]);
}
// Returns a copy of the sourceBuffer it is sent.
function copyBuffer(sourceBuffer) {
var source = new Uint8Array(sourceBuffer);
var copy = new Uint8Array(sourceBuffer.byteLength)
for (var i=0; i<source.byteLength; i++) {
copy[i] = source[i];
}
return copy;
}
function equalBuffers(a, b) {
if (a.byteLength !== b.byteLength) {
return false;
}
var aBytes = new Uint8Array(a);
var bBytes = new Uint8Array(b);
for (var i=0; i<a.byteLength; i++) {
if (aBytes[i] !== bBytes[i]) {
return false;
}
}
return true;
}
return;
}
|
GravisZro/MuditaOS | module-services/service-fileindexer/Common.hpp | // Copyright (c) 2017-2022, Mudita <NAME>.o.o. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
#pragma once
#include <algorithm>
#include <array>
#include <filesystem>
#include <string_view>
namespace service::detail
{
    namespace fs = std::filesystem;

    // Allow list of file extensions accepted by the indexer.
    constexpr std::array<std::string_view, 3> allowed_exts{".wav", ".mp3", ".flac"};

    // Debug switch: when true, every extension is treated as supported,
    // overriding the allow list above.
    constexpr bool allow_all_exts = false;

    // True when the path carries one of the allowed extensions, or when it
    // consists of an allowed extension alone (e.g. a file literally named
    // ".mp3", for which std::filesystem reports no extension).
    inline bool isExtSupported(const fs::path &path)
    {
        if (allow_all_exts) {
            return true;
        }
        const auto matches = [&path](const fs::path &candidate) {
            // Regular case: the file name ends with the candidate extension.
            if (path.has_extension() && path.extension() == candidate) {
                return true;
            }
            // Degenerate case: the path is nothing but the extension itself.
            return path == candidate;
        };
        return std::any_of(allowed_exts.cbegin(), allowed_exts.cend(), matches);
    }
} // namespace service::detail
|
1298698045/react-web | node_modules/@antv/g6/lib/controller/animate.js | <reponame>1298698045/react-web
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; var ownKeys = Object.keys(source); if (typeof Object.getOwnPropertySymbols === 'function') { ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function (sym) { return Object.getOwnPropertyDescriptor(source, sym).enumerable; })); } ownKeys.forEach(function (key) { _defineProperty(target, key, source[key]); }); } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
/**
* @fileOverview animate controller
* @author <EMAIL>
*/
var Base = require('./base');
var Animation = require('../animation/');
var Util = require('../util/');
var Global = require('../global');
var INVALID_ATTRS = ['matrix', 'fillStyle', 'strokeStyle', 'endArrow', 'startArrow'];
var Controller =
/*#__PURE__*/
function (_Base) {
_inheritsLoose(Controller, _Base);
function Controller() {
return _Base.apply(this, arguments) || this;
}
var _proto = Controller.prototype;
_proto.getDefaultCfg = function getDefaultCfg() {
return {
/**
* show animate
* @type {function|string}
*/
show: 'scaleIn',
/**
* hide animate
* @type {function|string}
*/
hide: 'scaleOut',
/**
* enter animate
* @type {function|string}
*/
enter: 'scaleIn',
/**
* leave animate
* @type {function|string}
*/
leave: 'scaleOut',
/**
* update animate
* @type {function}
*/
update: function update(_ref) {
var element = _ref.element,
endKeyFrame = _ref.endKeyFrame;
var props = endKeyFrame.props;
element.animate(_objectSpread({
matrix: props.matrix
}, props.attrs), Global.updateDuration, Global.updateEasing);
},
graph: null,
startCache: {},
endCache: {},
keykeyCache: {}
};
};
_proto._init = function _init() {
var _this = this;
var graph = this.graph;
var keykeyCache = this.keykeyCache;
graph.on('afteritemdraw', function (_ref2) {
var item = _ref2.item;
var group = item.getGraphicGroup();
group.deepEach(function (element) {
keykeyCache[element.gid] = _this._getCache(element);
}, true);
});
};
_proto.cacheGraph = function cacheGraph(cacheType, affectedItemIds) {
var _this2 = this;
var graph = this.graph;
var items;
if (affectedItemIds) {
items = affectedItemIds.map(function (affectedItemId) {
return graph.find(affectedItemId);
});
} else {
items = graph.getItems();
}
this[cacheType] = {};
items.forEach(function (item) {
item && _this2.cache(item, _this2[cacheType], cacheType);
});
};
_proto._getCache = function _getCache(element) {
var keykeyCache = this.keykeyCache;
if (!Util.isObject(element)) {
return keykeyCache[element];
}
var cache = {
props: {
matrix: Util.clone(element.getMatrix()),
attrs: {}
}
};
if (element.isShape) {
var attrs = element.attr();
attrs = Util.omit(attrs, INVALID_ATTRS);
cache.props.attrs = Util.clone(attrs);
}
return cache;
}
/**
* get animate
* @param {object} item - item
* @param {string} type - animate type could be `show`, `hide`, `enter`, `leave`, 'update'
* @return {function} animate function
*/
;
_proto._getAnimation = function _getAnimation(item, type) {
var graph = this.graph;
var shapeObj = item.shapeObj;
var defaultAnimation = this[type];
var shapeAnimation = shapeObj[type + 'Animation'] || shapeObj[type + 'Animate']; // compatible with Animate
var graphAnimate = graph.get('_' + type + 'Animation');
var animation = shapeAnimation || graphAnimate || defaultAnimation;
return Util.isString(animation) ? Animation[type + Util.upperFirst(animation)] : animation;
};
_proto.cache = function cache(item, _cache, type) {
var _this3 = this;
var group = item.getGraphicGroup();
group.deepEach(function (element) {
var id = element.gid;
var subCache = type === 'startCache' ? _this3._getCache(element) : _this3._getCache(element.gid);
subCache.enterAnimate = _this3._getAnimation(item, 'enter');
subCache.leaveAnimate = _this3._getAnimation(item, 'leave');
subCache.showAnimate = _this3._getAnimation(item, 'show');
subCache.hideAnimate = _this3._getAnimation(item, 'hide');
subCache.updateAnimate = _this3._getAnimation(item, 'update');
subCache.item = item;
subCache.element = element;
subCache.visible = element.get('visible');
_cache[id] = subCache;
}, true);
};
_proto._compare = function _compare() {
var startCache = this.startCache;
var endCache = this.endCache;
var enterElements = [];
var leaveElements = [];
var updateElements = [];
var hideElements = [];
var showElements = [];
Util.each(endCache, function (endKeyFrame, k) {
var startKeyFrame = startCache[k];
if (startKeyFrame) {
if (startKeyFrame.element.get('type') === endKeyFrame.element.get('type')) {
if (startKeyFrame.visible && endKeyFrame.visible) {
updateElements.push(k);
} else if (startKeyFrame.visible && !endKeyFrame.visible) {
hideElements.push(k);
} else if (!startKeyFrame.visible && endKeyFrame.visible) {
showElements.push(k);
}
}
} else {
enterElements.push(k);
}
});
Util.each(startCache, function (v, k) {
if (!endCache[k]) {
leaveElements.push(k);
}
});
this.enterElements = enterElements;
this.leaveElements = leaveElements;
this.updateElements = updateElements;
this.hideElements = hideElements;
this.showElements = showElements;
};
_proto._addTween = function _addTween() {
var enterElements = this.enterElements;
var leaveElements = this.leaveElements;
var updateElements = this.updateElements;
var hideElements = this.hideElements;
var showElements = this.showElements;
var startCache = this.startCache;
var endCache = this.endCache; // console.log('enterElements ==> ', enterElements);
// console.log('leaveElements ==> ', leaveElements);
// console.log('updateElements ==> ', updateElements);
// console.log('hideElements ==> ', hideElements);
// console.log('showElements ==> ', showElements);
enterElements.forEach(function (id) {
var endKeyFrame = endCache[id];
var enterAnimate = endKeyFrame.enterAnimate;
if (enterAnimate) {
enterAnimate({
element: endKeyFrame.element,
item: endKeyFrame.item,
endKeyFrame: endKeyFrame,
startKeyFrame: null,
startCache: startCache,
endCache: endCache,
done: function done() {}
});
}
});
leaveElements.forEach(function (id) {
var startKeyFrame = startCache[id];
var leaveAnimate = startKeyFrame.leaveAnimate;
if (leaveAnimate) {
var startElement = startCache[id].element;
if (startElement.isItemContainer) {
startElement.getParent().add(startElement);
}
leaveAnimate({
element: startElement,
item: startKeyFrame.item,
endKeyFrame: null,
startKeyFrame: startKeyFrame,
startCache: startCache,
endCache: endCache,
done: function done() {
if (startElement.isItemContainer) {
startElement.remove();
}
}
});
}
});
updateElements.forEach(function (id) {
var endKeyFrame = endCache[id];
var startKeyFrame = startCache[id];
var endElement = endKeyFrame.element;
var startElement = startKeyFrame.element;
var startProps = startKeyFrame.props;
var endProps = endKeyFrame.props;
var updateAnimate = endKeyFrame.updateAnimate;
var done = function done() {};
if (startProps.attrs) {
endElement.attr(startProps.attrs);
}
if (!Util.isEqual(startProps.matrix, endProps.matrix)) {
endElement.setMatrix(startProps.matrix);
}
updateAnimate({
element: endElement,
item: endKeyFrame,
endKeyFrame: endKeyFrame,
startKeyFrame: startKeyFrame,
startCache: startCache,
endCache: endCache,
done: done
});
if (startElement !== endElement) {
startElement.remove();
}
});
hideElements.forEach(function (id) {
var endKeyFrame = endCache[id];
var startKeyFrame = startCache[id];
var hideAnimate = endKeyFrame.hideAnimate;
if (hideAnimate) {
endKeyFrame.element.show();
hideAnimate({
element: endKeyFrame.element,
item: endKeyFrame.item,
endKeyFrame: endKeyFrame,
startKeyFrame: startKeyFrame,
startCache: startCache,
endCache: endCache,
done: function done() {
var item = endKeyFrame.item;
var group = item.getGraphicGroup();
!item.visible && group.hide();
}
});
}
});
showElements.forEach(function (id) {
var endKeyFrame = endCache[id];
var startKeyFrame = startCache[id];
var showAnimate = endKeyFrame.showAnimate;
if (showAnimate) {
showAnimate({
element: endKeyFrame.element,
item: endKeyFrame.item,
endKeyFrame: endKeyFrame,
startKeyFrame: startKeyFrame,
startCache: startCache,
endCache: endCache,
done: function done() {}
});
}
});
};
_proto.run = function run() {
if (this.graph.destroyed) {
return;
}
this._compare();
this._addTween();
};
return Controller;
}(Base);
module.exports = Controller; |
javs1287/5Steps_ListReport_Extension | exercises/ex_4/sources/ProjectSample/ZPOEXT.NOTIF.XX/node_modules/@sap/di.code-validation.xml/src/fioriAnalysis/rules/XML_IMAGE_ACCESSIBILITY.js | /*global define*/
if (typeof define !== 'function') {
var define = require('amdefine')(module)
};
define([], function() {
"use strict";
function buildPath(){
return "//ns:Image[(not(@tooltip) or normalize-space(@tooltip)='') "
+ "and (not(@ariaLabelledBy) or normalize-space(@ariaLabelledBy)='') "
+ "and (not(@ariaDescribedBy) or normalize-space(@ariaDescribedBy)='') "
+ "and (not(@alt) or normalize-space(@alt)='')]";
}
return {
id: "XML_IMAGE_ACCESSIBILITY",
category: "Accessibility Error",
path: buildPath(),
errorMsg: "An image must define one of the following attributes: tooltip, ariaLabeledBy, ariaDescribedBy, alt",
helpUrl: "https://help.hana.ondemand.com/webide/frameset.htm?1905c1b21a554f808421be80e6b858d7.html",
validate: function(report, path, nodes){
var result = [];
// console.log("validating (" + this.id + ")");
for(var i = 0; i < nodes.length; i++){
var node = nodes[i];
if(node){
result.push(node);
}
}
return result;
}
};
}); |
gocardless/nandi | spec/nandi/fixtures/example_migrations/20180104120000_my_migration.rb | <gh_stars>100-1000
# frozen_string_literal: true
# Nandi safe-migration fixture: manages an index on payments.foo.
class MyMigration < Nandi::Migration
  # Forward migration: create the index on payments.foo.
  def up
    add_index :payments, :foo
  end

  # Rollback: drop the index created by #up.
  def down
    remove_index :payments, :foo
  end
end
|
ArtemMaximum/task-manager | src/lib/api-utils.js | const HEADERS = {
'Cache-Control': 'no-cache'
//'Content-Type': 'application/json; charset=utf-8',
//'Accept': 'application/json'
};
/**
 * Whether the stored token expiry is still at least one hour away.
 * @return {boolean} false when the 'tokensExpires' cookie is missing or
 *   expires within the next 3600000 ms
 */
export const checkExpires = () => {
    const expires = readCookie('tokensExpires');
    return expires ? parseInt(expires) - 3600000 >= 0 : false;
};
/**
 * Read a single cookie value from document.cookie.
 * @param {string} name of the cookie
 * @param c scratch parameter, kept for backward compatibility with callers
 * @param C scratch parameter, kept for backward compatibility with callers
 * @param i scratch parameter, kept for backward compatibility with callers
 * @return {string|undefined} the cookie's raw value, or undefined when unset
 */
export const readCookie = (name, c = null, C = null, i = null) => {
    // Parse the "k1=v1; k2=v2" cookie string into a lookup object.
    // BUGFIX: the original declared `let cookies;` and then checked
    // `if (cookies)` before ever assigning it — a leftover of a broken
    // memoization attempt; that branch was dead and has been removed.
    const cookies = {};
    c = document.cookie.split('; ');
    for (i = c.length - 1; i >= 0; i--) {
        C = c[i].split('=');
        cookies[C[0]] = C[1];
    }
    return cookies[name];
}
// Merge caller-supplied headers over the defaults and wrap the result in a
// fetch-compatible Headers instance; caller values win on key conflicts.
export const manageHeaders = (myHeaders) => {
    const merged = Object.assign({}, HEADERS, myHeaders);
    return new Headers(merged);
}
jmdelvalle/fithub-release | app/controllers/pages_controller.rb | class PagesController < ApplicationController
before_action :set_auth
def index
end
def profile
end
def home
if current_user
render 'home'
else
redirect_to root_url
end
end
private
def set_auth
@auth = session[:omniauth] if session[:omniauth]
end
end
|
TheTripleV/GRIP | core/src/main/java/edu/wpi/grip/core/operations/composite/ThresholdOperation.java | <reponame>TheTripleV/GRIP<gh_stars>0
package edu.wpi.grip.core.operations.composite;
import edu.wpi.grip.core.Operation;
import org.bytedeco.javacpp.opencv_core.Mat;
import org.bytedeco.javacpp.opencv_core.Scalar;
/**
 * Base class for threshold-style operations. Keeps a small pool of reusable
 * OpenCV {@code Mat} buffers so per-frame processing does not re-allocate
 * native memory unless the input geometry or pixel type changes.
 *
 * @param <O> the concrete operation type (self-referential, CRTP-style)
 */
public abstract class ThresholdOperation<O extends ThresholdOperation<O>> implements Operation {

  // Reusable scratch buffers; see reallocateMatIfInputSizeOrWidthChanged.
  protected Mat[] dataArray = {new Mat(), new Mat(), new Mat()};

  /**
   * @param dataArray The array with the element that should be re-allocated
   * @param index The index of the data array that should be inspected
   * @param value The value that should be assigned to the mat regardless of being reallocated
   * @param input The input matrix that the dataArray element should be compared against
   *
   * @return Either the old mat with the value assigned or a newly created Matrix.
   */
  protected Mat reallocateMatIfInputSizeOrWidthChanged(final Mat[] dataArray, final int index,
                                                       final Scalar value, final Mat input) {
    // Re-allocate only when width, height, or pixel type differ from the
    // input; otherwise just overwrite the existing buffer with `value`.
    if (dataArray[index].size().width() != input.size().width()
        || dataArray[index].size().height() != input.size().height()
        || dataArray[index].type() != input.type()) {
      return dataArray[index] = new Mat(input.size(), input.type(), value);
    } else {
      return dataArray[index].put(value);
    }
  }
}
|
shonohs/modelutils | modelutils/onnx/add_cast_for_op.py | import argparse
import os
import onnx
import onnx.numpy_helper
def insert_cast_op(model, index, input_name, output_name, to_type):
    """Insert a Cast node at position ``index`` of the model's graph.

    The node casts ``input_name`` to the ONNX tensor type named by
    ``to_type`` (e.g. ``'FLOAT'``) and emits it as ``output_name``.
    """
    cast_node = onnx.helper.make_node(
        'Cast', [input_name], [output_name], to=getattr(onnx.TensorProto, to_type)
    )
    # Protobuf repeated fields have no insert(), so rebuild the node list.
    rebuilt = list(model.graph.node)
    rebuilt.insert(index, cast_node)
    del model.graph.node[:]
    model.graph.node.extend(rebuilt)
    print(f"Inserted Cast {input_name} => {output_name}")
def add_cast_for_op(model, index, input_indices, to_type):
    # Insert one Cast node in front of the node at `index`, for each of the
    # requested input positions. Every insertion shifts the target node one
    # slot further down the graph, hence the `index += 1` per iteration.
    for i in input_indices:
        node = model.graph.node[index]
        input_name = node.input[i]
        # Route the op's input through a new "<name>_<type>" tensor fed by
        # the inserted Cast node.
        new_input_name = input_name + '_' + to_type.lower()
        node.input[i] = new_input_name
        insert_cast_op(model, index, input_name, new_input_name, to_type)
        index += 1
def add_cast(input_filename, output_filename, op_type, input_indices, to_type):
    """Add Cast ops in front of the given inputs of every `op_type` node.

    Loads the model from `input_filename`, rewrites it, validates it with
    the ONNX checker, and saves it to `output_filename` (which must not
    already exist).
    """
    if os.path.exists(output_filename):
        raise RuntimeError(f"{output_filename} already exists.")
    model = onnx.load(input_filename)
    done = False
    # Outputs of nodes already handled, so a node is never processed twice.
    processed_nodes = []
    while not done:
        for i, node in enumerate(model.graph.node):
            if node.op_type == op_type and node.output not in processed_nodes:
                add_cast_for_op(model, i, input_indices, to_type)
                print(f"Added casts for name: {node.name}, op_type: {node.op_type}")
                processed_nodes.append(node.output)
                # NOTE(review): this `continue` resumes the *for* loop over a
                # node list that add_cast_for_op just mutated; it looks like a
                # `break` (restarting the scan via the outer while) was
                # intended — confirm before relying on multi-match behavior.
                continue
        done = True
    onnx.checker.check_model(model)
    onnx.save(model, output_filename)
def main():
    """CLI entry point: add Cast operators around the chosen op's inputs."""
    parser = argparse.ArgumentParser("Add Cast operators to the inputs of specified op")
    parser.add_argument('input_filepath', type=str)
    parser.add_argument('output_filepath', type=str)
    parser.add_argument('--op_type', type=str, default='NonMaxSuppression', help="Operator type")
    # BUGFIX: these values index into node.input, so they must be integers.
    # The original declared type=str, which crashed whenever the flag was
    # actually supplied on the command line (string used as a list index).
    parser.add_argument('--input_indices', type=int, nargs='+', default=[0, 1], help="input indices")
    parser.add_argument('--to', type=str, default='FLOAT', help="The data type to which the tensors are cast")

    args = parser.parse_args()
    add_cast(args.input_filepath, args.output_filepath, args.op_type, args.input_indices, args.to)
|
thiilins/cube-game-store | src/controllers/ProductController.js | <filename>src/controllers/ProductController.js
// Importar os models (produto/categoria)
const ProductController = {
  // Renders the "add product" form. TODO: load categories and pass them in.
  createProductPage(req, res) {
    try {
      // Categorias = categories
      return res.render("admin/dashboard", {
        file: "products/create",
        page: "Adicionar Produto",
        //categories
      });
    } catch (error) {
      console.log(error);
    }
  },

  // Renders the product list. TODO: load products (including categories).
  listProductPage(req, res) {
    try {
      //produtos = product (include -- categorias)
      return res.render("admin/dashboard", {
        file: "products/list",
        page: "Produtos",
        //product
      });
    } catch (error) {
      console.log(error);
    }
  },

  // Renders a single product view. TODO: load the product by req.params.id.
  viewProductPage(req, res) {
    //produto = product (include -- categorias)
    // id
    try {
      return res.render("admin/dashboard", {
        file: "products/view",
        page: "",
        //product
      });
    } catch (error) {
      console.log(error);
    }
  },

  // Renders the edit form. TODO: load the product and the category list.
  editProductPage(req, res) {
    try {
      //produto = product (include -- categorias)
      // categorias = categories
      return res.render("admin/dashboard", {
        file: "products/edit",
        page: "Editando:",
        //product
        //categories
      });
    } catch (error) {
      console.log(error);
    }
  },

  // Creates a product from the posted form. The featured image is optional;
  // multer places a single upload on req.file.
  async createProduct(req, res) {
    try {
      const {
        nome,
        SKU,
        fabricante_id,
        preco_regular,
        preco_promocional,
        descricao_curta,
        descricao,
        altura_embalagem,
        largura_embalagem,
        comprimento_embalagem,
        peso_embalagem,
        altura_produto,
        largura_produto,
        peso_produto,
        comprimento_produto,
        estoque,
      } = req.body;

      // Simplified from `req.file && req.files != "undefined"`: the second
      // clause compared against the *string* "undefined" and was always true.
      let imagem_destacada = null;
      if (req.file) {
        imagem_destacada = req.file.filename;
      }

      await Product.create({
        nome,
        SKU,
        fabricante_id,
        preco_regular,
        preco_promocional,
        descricao_curta,
        descricao,
        altura_embalagem,
        largura_embalagem,
        comprimento_embalagem,
        peso_embalagem,
        altura_produto,
        largura_produto,
        peso_produto,
        comprimento_produto,
        imagem_destacada,
        estoque,
        vendas: 0,
        ativo: 1,
      });

      return res.redirect("admin/produtos");
    } catch (error) {
      console.log(error); // TODO: send an error response instead of hanging the request
    }
  },

  // Updates an existing product identified by req.params.id.
  async editProduct(req, res) {
    try {
      // BUGFIX: `id` was used in the where-clause below but never extracted
      // from the route parameters.
      const { id } = req.params;
      const {
        nome,
        SKU,
        fabricante_id,
        preco_regular,
        preco_promocional,
        descricao_curta,
        descricao,
        altura_embalagem,
        largura_embalagem,
        comprimento_embalagem,
        peso_embalagem,
        altura_produto,
        largura_produto,
        peso_produto,
        comprimento_produto,
        estoque,
        ativo,
      } = req.body;

      // undefined (not null) so the ORM leaves the image column untouched
      // when no new file was uploaded.
      let imagem_destacada = undefined;
      if (req.file) {
        imagem_destacada = req.file.filename;
      }

      // BUGFIX: the Sequelize method is `update`, not `updated`.
      await Product.update({
        nome,
        SKU,
        fabricante_id,
        preco_regular,
        preco_promocional,
        descricao_curta,
        descricao,
        altura_embalagem,
        largura_embalagem,
        comprimento_embalagem,
        peso_embalagem,
        altura_produto,
        largura_produto,
        peso_produto,
        comprimento_produto,
        imagem_destacada,
        estoque,
        ativo,
      }, {
        where: { id },
      });

      return res.redirect("admin/produtos");
    } catch (error) {
      console.log(error);
    }
  },

  // Deletes a product. Products that already have sales are soft-disabled
  // (ativo = 0) instead of destroyed, preserving order history.
  async deleteProduct(req, res) {
    try {
      const { id } = req.params;
      // BUGFIX: the original referenced the undefined identifiers `Produto`
      // and `produto`; the Product model is used consistently here.
      const product = await Product.findByPk(id);
      const { vendas } = product;

      if (vendas >= 1) {
        await Product.update({
          ativo: 0
        }, { where: { id } });
        return res.redirect('admin/produtos');
      }

      await Product.destroy({
        where: { id },
      });
      return res.redirect('admin/produtos');
    } catch (error) {
      console.log(error);
    }
  },
};

module.exports = ProductController;
|
danilloricardo/remp | Beam/go/vendor/github.com/olivere/env/example_test.go | package env_test
import (
"flag"
"fmt"
"github.com/olivere/env"
)
// Example demonstrates flag/env fallback: the -addr flag wins when set;
// otherwise env.String consults HTTP_ADDR, then ADDR, and finally falls
// back to the hard-coded default (which is what the pinned Output shows).
func Example() {
	var (
		// Parse addr from flag, use HTTP_ADDR and ADDR env vars as fallback
		addr = flag.String("addr", env.String("127.0.0.1:3000", "HTTP_ADDR", "ADDR"), "Bind to this address")
	)
	flag.Parse()

	fmt.Println(*addr)
	// Output: 127.0.0.1:3000
}
sting-ioc/sting | processor/src/test/fixtures/input/com/example/multiround/fragment/MyInjector.java | <gh_stars>1-10
package com.example.multiround.fragment;
import sting.Injector;
/**
 * Sting injector interface for the multi-round fragment fixture.
 * Includes {@code MyFragment}, which presumably contributes the Object
 * binding exposed here — confirm against MyFragment's declarations.
 */
@Injector( includes = MyFragment.class )
interface MyInjector
{
  /** Returns the Object instance provided by the injector's includes. */
  Object getObject();
}
|
iTeam-co/pytglib | pytglib/api/types/invoice.py |
from ..utils import Object
class Invoice(Object):
    """
    Product invoice

    Attributes:
        ID (:obj:`str`): ``Invoice``

    Args:
        currency (:obj:`str`):
            ISO 4217 currency code
        price_parts (List of :class:`telegram.api.types.labeledPricePart`):
            A list of objects used to calculate the total price of the product
        is_test (:obj:`bool`):
            True, if the payment is a test payment
        need_name (:obj:`bool`):
            True, if the user's name is needed for payment
        need_phone_number (:obj:`bool`):
            True, if the user's phone number is needed for payment
        need_email_address (:obj:`bool`):
            True, if the user's email address is needed for payment
        need_shipping_address (:obj:`bool`):
            True, if the user's shipping address is needed for payment
        send_phone_number_to_provider (:obj:`bool`):
            True, if the user's phone number will be sent to the provider
        send_email_address_to_provider (:obj:`bool`):
            True, if the user's email address will be sent to the provider
        is_flexible (:obj:`bool`):
            True, if the total price depends on the shipping method

    Returns:
        Invoice

    Raises:
        :class:`telegram.Error`
    """
    ID = "invoice"

    def __init__(self, currency, price_parts, is_test, need_name, need_phone_number, need_email_address, need_shipping_address, send_phone_number_to_provider, send_email_address_to_provider, is_flexible, **kwargs):
        self.currency = currency  # str
        self.price_parts = price_parts  # list of labeledPricePart
        self.is_test = is_test  # bool
        self.need_name = need_name  # bool
        self.need_phone_number = need_phone_number  # bool
        self.need_email_address = need_email_address  # bool
        self.need_shipping_address = need_shipping_address  # bool
        self.send_phone_number_to_provider = send_phone_number_to_provider  # bool
        self.send_email_address_to_provider = send_email_address_to_provider  # bool
        self.is_flexible = is_flexible  # bool

    @staticmethod
    def read(q: dict, *args) -> "Invoice":
        # Deserialize the nested price parts first, then build the invoice
        # from the remaining scalar fields of the payload.
        parts = [Object.read(item) for item in q.get('price_parts', [])]
        return Invoice(
            q.get('currency'),
            parts,
            q.get('is_test'),
            q.get('need_name'),
            q.get('need_phone_number'),
            q.get('need_email_address'),
            q.get('need_shipping_address'),
            q.get('send_phone_number_to_provider'),
            q.get('send_email_address_to_provider'),
            q.get('is_flexible'),
        )
|
olofd/react-native-insta-photo-studio | src/effects/ExportPanel.native.js | <filename>src/effects/ExportPanel.native.js
import {View, StyleSheet} from "react-native";
import React, {Component, PropTypes} from "react";
const styles = StyleSheet.create({
root: {
flexDirection: "column",
alignItems: "center",
paddingTop: 20
}
});
export default class ExportPanel extends Component {
render () {
const {children} = this.props;
return <View style={styles.root}>
{children}
</View>;
}
}
ExportPanel.propTypes = {
children: PropTypes.any.isRequired
};
|
mikespux/Task-Attendance-Bluetooth-Fingerprint | app/src/main/java/com/fgtit/fingerprintattendance/activity/EnrollActivity.java | package com.fgtit.fingerprintattendance.activity;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.design.widget.CoordinatorLayout;
import android.support.design.widget.Snackbar;
import android.support.design.widget.TextInputLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Base64;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.fgtit.fingerprintattendance.R;
import com.fgtit.fingerprintattendance.app.AppConfig;
import com.fgtit.fingerprintattendance.doa.EnrollDataSource;
import com.fgtit.fingerprintattendance.model.EnrollItem;
import com.fgtit.fingerprintattendance.rest.ApiClient;
import com.fgtit.fingerprintattendance.rest.ApiInterface;
import com.fgtit.fingerprintattendance.service.BluetoothService;
import com.fgtit.fingerprintattendance.widget.RoundImage;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Timer;
import java.util.TimerTask;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
public class EnrollActivity extends AppCompatActivity implements View.OnClickListener {
// ---------------------------------------------------------------------------
// Enrollment screen state: captures a face photo via the camera plus two
// fingerprint templates from a Bluetooth fingerprint reader, then persists
// and uploads the combined record.
// ---------------------------------------------------------------------------
// Debugging
private static final String TAG = CaptureActivity.class.getSimpleName();
private static final boolean D = true;
//private Subscription subscription;
//Observable<EnrollItem> observable;
// Message types sent from the BluetoothChatService Handler
public static final int MESSAGE_STATE_CHANGE = 1;
public static final int MESSAGE_READ = 2;
public static final int MESSAGE_WRITE = 3;
public static final int MESSAGE_DEVICE_NAME = 4;
public static final int MESSAGE_TOAST = 5;
// Key names received from the BluetoothChatService Handler
public static final String DEVICE_NAME = "device_name";
public static final String TOAST = "toast";
// Intent request codes
private static final int REQUEST_CONNECT_DEVICE = 1;
private static final int REQUEST_ENABLE_BT = 2;
private static final int REQUEST_CAMERA = 3;
// Name of the connected device
private String mConnectedDeviceName = null;
// String buffer for outgoing messages
private StringBuffer mOutStringBuffer;
// Local Bluetooth adapter
private BluetoothAdapter mBluetoothAdapter = null;
// Member object for the chat services
private BluetoothService mBluetoothService = null;
// Fingerprint-reader protocol command ids (sent in byte 4 of an "FT" frame)
private final static byte CMD_GETBAT = 0x21;
private final static byte CMD_GETIMAGE = 0x30;
private final static byte CMD_GETCHAR = 0x31;
// Command currently in flight; used by ReceiveCommand to route responses
private byte mDeviceCmd = 0x00;
// True while a command is outstanding; blocks concurrent SendCommand calls
private boolean mIsWork = false;
// Accumulates non-image response frames until a full frame arrives
private byte mCmdData[] = new byte[10240];
private int mCmdSize = 0;
// One-shot timeout machinery armed per command (see TimeOutStart/TimeOutStop)
private Timer mTimerTimeout = null;
private TimerTask mTaskTimeout = null;
private Handler mHandlerTimeout;
// Last fingerprint template (characteristic data) received from the reader
public byte mMatData[] = new byte[512];
public int mMatSize = 0;
// Battery response buffer
public byte mBat[] = new byte[2];
// Accumulates streamed fingerprint image data (4-bit packed pixels)
public byte mUpImage[] = new byte[73728];//36864
public int mUpImageSize = 0;
// 0 = none, 1 = left thumb, 2 = right thumb currently being captured
private int whichThumb = 0;
// Base64-encoded captured artifacts, assembled into the upload payload
private String dataImage;
private String dataLeftThumb;
private String dataRightThumb;
// Completion flags checked by enroll() before submitting
private boolean isCaptureImage;
private boolean isEnrollLeftThumb;
private boolean isEnrollRightThumb;
private MenuItem bluetoothMenu;
private ImageView enrollImage, leftThumbImage, rightThumbImage;
private Button enrollButton;
private EditText inputFirstName, inputLastName;
private ProgressBar progressBar;
private TextInputLayout inputLayoutFirstName, inputLayoutLastName;
private EnrollDataSource enrollDataSource;
// JSON payload of the most recent enrollment; read by the AsyncTask uploader
private JSONObject parameters;
/**
 * Wires up the enrollment form (name fields, photo and thumb image slots,
 * submit button) and verifies that the device has a Bluetooth adapter;
 * finishes the activity immediately when Bluetooth is unsupported.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_enroll);
Toolbar toolbar = (Toolbar) findViewById(R.id.enroll_toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayShowHomeEnabled(true);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_PORTRAIT);
//this.subscription = observable.subscribe(this);
enrollDataSource = new EnrollDataSource(this);
// Attach per-field watchers so validation errors clear as the user types.
inputLayoutFirstName = (TextInputLayout) findViewById(R.id.input_enroll_layout_first_name);
inputLayoutLastName = (TextInputLayout) findViewById(R.id.input_enroll_layout_last_name);
inputFirstName = (EditText) findViewById(R.id.input_enroll_first_name);
inputFirstName.addTextChangedListener(new MyTextWatcher(inputFirstName));
inputLastName = (EditText) findViewById(R.id.input_enroll_last_name);
inputLastName.addTextChangedListener(new MyTextWatcher(inputLastName));
// The three circular image slots double as capture triggers (see onClick).
enrollImage = (ImageView) findViewById(R.id.enroll_image);
enrollImage.setOnClickListener(this);
leftThumbImage = (ImageView) findViewById(R.id.img_left_thumb);
leftThumbImage.setOnClickListener(this);
rightThumbImage = (ImageView) findViewById(R.id.img_right_thumb);
rightThumbImage.setOnClickListener(this);
enrollButton = (Button) findViewById(R.id.btn_enroll);
enrollButton.setOnClickListener(this);
progressBar = (ProgressBar) findViewById(R.id.enroll_progressBar);
// Get local Bluetooth adapter
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
// If the adapter is null, then Bluetooth is not supported
if (mBluetoothAdapter == null) {
showSnackBar("Bluetooth is not available");
finish();
}
}
/**
 * Refreshes the toolbar Bluetooth icon from the service's current connection
 * state. Mirrors the state mapping used in mHandler's MESSAGE_STATE_CHANGE.
 */
private void checkBluetoothStatus() {
if (mBluetoothService != null) {
// Only if the state is STATE_NONE, do we know that we haven't started already
switch (mBluetoothService.getState()) {
case BluetoothService.STATE_CONNECTED:
updateBluetoothStatus(2);
//mConversationArrayAdapter.clear();
break;
case BluetoothService.STATE_CONNECTING:
updateBluetoothStatus(1);
break;
case BluetoothService.STATE_LISTEN:
case BluetoothService.STATE_NONE:
updateBluetoothStatus(0);
break;
}
}
}
/** Tears down the Bluetooth connection when the activity is destroyed. */
@Override
public void onDestroy() {
super.onDestroy();
// Stop the Bluetooth chat services
if (mBluetoothService != null) mBluetoothService.stop();
//this.subscription.unsubscribe();
}
/**
 * Dispatches the four tappable views: the photo slot launches the camera,
 * each thumb slot requests a fingerprint capture (whichThumb records which
 * slot the eventual reader response belongs to), and the button submits.
 */
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.enroll_image:
captureImage();
break;
case R.id.img_left_thumb: {
SendCommand(CMD_GETCHAR, null, 0);
whichThumb = 1;
}
break;
case R.id.img_right_thumb: {
SendCommand(CMD_GETCHAR, null, 0);
whichThumb = 2;
}
break;
case R.id.btn_enroll: {
enroll();
}
break;
default:
break;
}
}
/** Creates the shared BluetoothService bound to mHandler and syncs the icon. */
private void blueToothSetup() {
Log.d(TAG, "setupChat()");
mBluetoothService = BluetoothService.getInstance(this, mHandler); // Initialize the BluetoothChatService to perform bluetooth connections
mOutStringBuffer = new StringBuffer(""); // Initialize the buffer for outgoing messages
checkBluetoothStatus();
}
/**
 * Ensures Bluetooth is on: prompts the user to enable it (result handled in
 * onActivityResult under REQUEST_ENABLE_BT) or sets up the service directly.
 */
private void bluetoothEnable() {
// If BT is not on, request that it be enabled.
// setupChat() will then be called during onActivityResult
if (!mBluetoothAdapter.isEnabled()) {
Intent enableIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableIntent, REQUEST_ENABLE_BT);
// Otherwise, setup the chat session
} else {
if (mBluetoothService == null) blueToothSetup();
}
}
/**
 * Returns the low byte of the arithmetic sum of the first {@code size}
 * entries of {@code buffer} — the frame checksum used by the reader protocol.
 * Signed byte values are congruent to their unsigned counterparts modulo 256,
 * so masking the running total with 0x00ff yields the correct checksum byte.
 */
private int calcCheckSum(byte[] buffer, int size) {
    int total = 0;
    int index = 0;
    while (index < size) {
        total += buffer[index];
        index++;
    }
    return total & 0x00ff;
}
/**
 * Arms a 10-second watchdog for the command in flight. When it fires, the
 * handler clears mIsWork and reports a timeout to the user. No-op when a
 * watchdog is already armed.
 * NOTE(review): the anonymous Handler is created on the calling thread and
 * holds an implicit reference to the activity — presumably always called
 * from the UI thread; confirm, and consider a static/weak handler.
 */
public void TimeOutStart() {
if (mTimerTimeout != null) {
return;
}
mTimerTimeout = new Timer();
mHandlerTimeout = new Handler() {
@Override
public void handleMessage(Message msg) {
// Fires on the handler's thread: tear the timer down, then surface the
// timeout only if the command was still outstanding.
TimeOutStop();
if (mIsWork) {
mIsWork = false;
showSnackBar("Error! Time Out");
}
super.handleMessage(msg);
}
};
mTaskTimeout = new TimerTask() {
@Override
public void run() {
// Runs on the Timer thread; marshal back via the handler.
Message message = new Message();
message.what = 1;
mHandlerTimeout.sendMessage(message);
}
};
// Repeats every 10s until TimeOutStop() is called.
mTimerTimeout.schedule(mTaskTimeout, 10000, 10000);
}
/** Cancels and clears the pending timeout timer and task, if any are armed. */
public void TimeOutStop() {
    if (mTimerTimeout == null) {
        return;
    }
    mTimerTimeout.cancel();
    mTimerTimeout = null;
    mTaskTimeout.cancel();
    mTaskTimeout = null;
}
/**
 * Builds and transmits one protocol frame to the fingerprint reader:
 * "FT" magic, two reserved bytes, command id, 16-bit little-endian payload
 * length, payload, then a 16-bit little-endian checksum. Silently drops the
 * request while another command is outstanding (mIsWork). Arms the timeout
 * watchdog and records the command id so ReceiveCommand can route the reply.
 * NOTE(review): calcCheckSum already masks to the low byte, so the high
 * checksum byte written at sendbuf[8+size] is always 0 — presumably the
 * device expects exactly that; confirm against the reader's protocol spec.
 */
private void SendCommand(byte cmdid, byte[] data, int size) {
if (mIsWork) return;
int sendsize = 9 + size;
byte[] sendbuf = new byte[sendsize];
sendbuf[0] = 'F';
sendbuf[1] = 'T';
sendbuf[2] = 0;
sendbuf[3] = 0;
sendbuf[4] = cmdid;
sendbuf[5] = (byte) (size);
sendbuf[6] = (byte) (size >> 8);
if (size > 0) {
for (int i = 0; i < size; i++) {
sendbuf[7 + i] = data[i];
}
}
int sum = calcCheckSum(sendbuf, (7 + size));
sendbuf[7 + size] = (byte) (sum);
sendbuf[8 + size] = (byte) (sum >> 8);
mIsWork = true;
TimeOutStart();
mDeviceCmd = cmdid;
mCmdSize = 0;
mBluetoothService.write(sendbuf);
// Per-command UI feedback.
switch (sendbuf[4]) {
case CMD_GETBAT:
//AddStatusList("Get Battery Value ...");
break;
case CMD_GETIMAGE:
mUpImageSize = 0;
showSnackBar("Processing...");
break;
case CMD_GETCHAR:
showSnackBar("Initializing...");
break;
}
}
/**
 * Serializes a 32-bit int into a 4-byte little-endian array (least
 * significant byte first), as required by the BMP header fields written
 * in toBmpByte.
 */
private byte[] changeByte(int data) {
    byte[] littleEndian = new byte[4];
    for (int i = 0; i < 4; i++) {
        littleEndian[i] = (byte) (data >> (8 * i));
    }
    return littleEndian;
}
/**
 * Wraps raw 8-bit grayscale pixels in a complete BMP file: 14-byte file
 * header, 40-byte BITMAPINFOHEADER, a 256-entry grayscale palette, then the
 * pixel data. Multi-byte header fields are written little-endian via
 * changeByte. Returns null if serialization fails (exception is logged).
 * NOTE(review): BMP rows are bottom-up and padded to 4-byte multiples; this
 * writer emits rows exactly as given — presumably acceptable for the fixed
 * 152x200 sensor image (152 is a multiple of 4); confirm for other sizes.
 */
private byte[] toBmpByte(int width, int height, byte[] data) {
byte[] buffer = null;
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
// --- BITMAPFILEHEADER ---
int bfType = 0x424d;
int bfSize = 54 + 1024 + width * height;
int bfReserved1 = 0;
int bfReserved2 = 0;
int bfOffBits = 54 + 1024;
dos.writeShort(bfType);
dos.write(changeByte(bfSize), 0, 4);
dos.write(changeByte(bfReserved1), 0, 2);
dos.write(changeByte(bfReserved2), 0, 2);
dos.write(changeByte(bfOffBits), 0, 4);
// --- BITMAPINFOHEADER (8-bit indexed, uncompressed) ---
int biSize = 40;
int biWidth = width;
int biHeight = height;
int biPlanes = 1;
int biBitcount = 8;
int biCompression = 0;
int biSizeImage = width * height;
int biXPelsPerMeter = 0;
int biYPelsPerMeter = 0;
int biClrUsed = 256;
int biClrImportant = 0;
dos.write(changeByte(biSize), 0, 4);
dos.write(changeByte(biWidth), 0, 4);
dos.write(changeByte(biHeight), 0, 4);
dos.write(changeByte(biPlanes), 0, 2);
dos.write(changeByte(biBitcount), 0, 2);
dos.write(changeByte(biCompression), 0, 4);
dos.write(changeByte(biSizeImage), 0, 4);
dos.write(changeByte(biXPelsPerMeter), 0, 4);
dos.write(changeByte(biYPelsPerMeter), 0, 4);
dos.write(changeByte(biClrUsed), 0, 4);
dos.write(changeByte(biClrImportant), 0, 4);
// Identity grayscale palette: index i -> (B=i, G=i, R=i, reserved=0).
byte[] palatte = new byte[1024];
for (int i = 0; i < 256; i++) {
palatte[i * 4] = (byte) i;
palatte[i * 4 + 1] = (byte) i;
palatte[i * 4 + 2] = (byte) i;
palatte[i * 4 + 3] = 0;
}
dos.write(palatte);
dos.write(data);
dos.flush();
buffer = baos.toByteArray();
dos.close();
baos.close();
} catch (Exception e) {
e.printStackTrace();
}
return buffer;
}
/**
 * Expands 4-bit packed sensor pixels (two pixels per byte) into one byte per
 * pixel — each nibble is moved into the high 4 bits of its own byte — and
 * wraps the result in a BMP via toBmpByte. Returns null for null input.
 */
public byte[] getFingerprintImage(byte[] data, int width, int height) {
if (data == null) {
return null;
}
byte[] imageData = new byte[data.length * 2];
for (int i = 0; i < data.length; i++) {
// High nibble -> first pixel, low nibble (shifted up) -> second pixel.
imageData[i * 2] = (byte) (data[i] & 0xf0);
imageData[i * 2 + 1] = (byte) (data[i] << 4 & 0xf0);
}
byte[] bmpData = toBmpByte(width, height, imageData);
return bmpData;
}
/**
 * Copies {@code size} bytes from {@code srcbuf[srcoffset..]} into
 * {@code dstbuf[dstoffset..]}, front to back, matching C's memcpy for the
 * distinct-array calls made by this class.
 */
private void memcpy(byte[] dstbuf, int dstoffset, byte[] srcbuf, int srcoffset, int size) {
    int copied = 0;
    while (copied < size) {
        dstbuf[dstoffset + copied] = srcbuf[srcoffset + copied];
        copied++;
    }
}
/**
 * Accumulates bytes streamed back from the reader and completes the command
 * recorded in mDeviceCmd. CMD_GETIMAGE responses stream raw packed pixels
 * into mUpImage until 15200 bytes (152x200 at 4 bpp) arrive, at which point
 * the image is rendered into whichever thumb slot is pending. All other
 * commands accumulate a framed response in mCmdData, validated by the "FT"
 * magic and the little-endian length at bytes 5-6, then dispatched by the
 * command id at byte 4.
 */
private void ReceiveCommand(byte[] databuf, int datasize) {
if (mDeviceCmd == CMD_GETIMAGE) {
memcpy(mUpImage, mUpImageSize, databuf, 0, datasize);
mUpImageSize = mUpImageSize + datasize;
// 15200 packed bytes == full 152x200 image; render and finish the capture.
if (mUpImageSize >= 15200) {
byte[] bmpdata = getFingerprintImage(mUpImage, 152, 200);
Bitmap bmp = BitmapFactory.decodeByteArray(bmpdata, 0, bmpdata.length);
Drawable roundedImage = new RoundImage(bmp);
if (whichThumb == 1) {
RelativeLayout relativeLayout = (RelativeLayout) findViewById(R.id.left_thumb_relative_layout);
relativeLayout.setBackgroundResource(R.drawable.white_circle);
leftThumbImage.setScaleType(ImageView.ScaleType.FIT_XY);
leftThumbImage.setImageDrawable(roundedImage);
isEnrollLeftThumb = true;
whichThumb = 0;
} else if (whichThumb == 2) {
RelativeLayout relativeLayout = (RelativeLayout) findViewById(R.id.right_thumb_relative_layout);
relativeLayout.setBackgroundResource(R.drawable.white_circle);
rightThumbImage.setScaleType(ImageView.ScaleType.FIT_XY);
rightThumbImage.setImageDrawable(roundedImage);
isEnrollRightThumb = true;
whichThumb = 0;
}
mUpImageSize = 0;
mIsWork = false;
/*
try {
Thread.currentThread();
Thread.sleep(200);
}catch (InterruptedException e){
e.printStackTrace();
}
SendCommand(CMD_GETCHAR,null,0);
*/
showSnackBar("Data Captured");
}
} else {
// Framed response path: keep appending until the declared length arrives.
memcpy(mCmdData, mCmdSize, databuf, 0, datasize);
mCmdSize = mCmdSize + datasize;
int totalsize = (byte) (mCmdData[5]) + ((mCmdData[6] << 8) & 0xFF00) + 9;
if (mCmdSize >= totalsize) {
mCmdSize = 0;
mIsWork = false;
// Validate the "FT" magic before trusting the frame contents.
if ((mCmdData[0] == 'F') && (mCmdData[1] == 'T')) {
switch (mCmdData[4]) {
case CMD_GETBAT: {
int size = (byte) (mCmdData[5]) + ((mCmdData[6] << 8) & 0xFF00) - 1;
if (size > 0) {
memcpy(mBat, 0, mCmdData, 8, size);
//AddStatusList("Battery Value:"+Integer.toString(mBat[0]/10)+"."+Integer.toString(mBat[0]%10)+"V");
} else ;
//AddStatusList("Get Battery Value Fail");
}
break;
case CMD_GETCHAR: {
// Byte 7 == 1 means the reader captured a template; stash it for the
// pending thumb and immediately request the preview image.
int size = (byte) (mCmdData[5]) + ((mCmdData[6] << 8) & 0xFF00) - 1;
if (mCmdData[7] == 1) {
memcpy(mMatData, 0, mCmdData, 8, size);
mMatSize = size;
if (whichThumb == 1)
dataLeftThumb = Base64.encodeToString(mMatData, 0, mMatSize, Base64.DEFAULT); //hexToString(mMatData,mMatSize);
else if (whichThumb == 2)
dataRightThumb = Base64.encodeToString(mMatData, 0, mMatSize, Base64.DEFAULT); //hexToString(mMatData,mMatSize);
SendCommand(CMD_GETIMAGE, null, 0);
} else
showSnackBar("Error! Getting Data.");
}
break;
}
}
}
}
}
/**
 * Renders the first {@code size} bytes of {@code data} as a comma-prefixed,
 * upper-case hex dump (e.g. {0xAB, 0x05} -> ",AB,5"). Currently only
 * referenced from commented-out debug code.
 * Fix: accumulate into a StringBuilder instead of repeated String
 * concatenation, which was O(n^2) in the dump length.
 */
private String hexToString(byte[] data, int size) {
    StringBuilder str = new StringBuilder();
    for (int i = 0; i < size; i++) {
        str.append(',').append(Integer.toHexString(data[i] & 0xFF).toUpperCase());
    }
    return str.toString();
}
// The Handler that gets information back from the BluetoothService.
// Runs on the UI thread: connection-state changes update the toolbar icon,
// incoming bytes are fed to ReceiveCommand, and device-name/toast messages
// are surfaced as snackbars.
private final Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MESSAGE_STATE_CHANGE:
if (D) Log.i(TAG, "MESSAGE_STATE_CHANGE: " + msg.arg1);
switch (msg.arg1) {
case BluetoothService.STATE_CONNECTED:
updateBluetoothStatus(2);
//mConversationArrayAdapter.clear();
break;
case BluetoothService.STATE_CONNECTING:
updateBluetoothStatus(1);
break;
case BluetoothService.STATE_LISTEN:
case BluetoothService.STATE_NONE:
updateBluetoothStatus(0);
break;
}
break;
case MESSAGE_WRITE:
// Outgoing-frame echo; kept only for the commented-out debug listing.
byte[] writeBuf = (byte[]) msg.obj;
// construct a string from the buffer
//String writeMessage = new String(writeBuf);
//AddStatusList("Send: " + writeMessage);
//AddStatusListHex(writeBuf,writeBuf.length);
break;
case MESSAGE_READ:
// msg.arg1 is the number of valid bytes in the buffer.
byte[] readBuf = (byte[]) msg.obj;
//AddStatusList("Len="+Integer.toString(msg.arg1));
//AddStatusListHex(readBuf,msg.arg1);
ReceiveCommand(readBuf, msg.arg1);
break;
case MESSAGE_DEVICE_NAME:
// save the connected device's name
mConnectedDeviceName = msg.getData().getString(DEVICE_NAME);
/*Toast.makeText(getApplicationContext(), "Connected to "
+ mConnectedDeviceName, Toast.LENGTH_SHORT).show();*/
showSnackBar("Connected to " + mConnectedDeviceName);
break;
case MESSAGE_TOAST:
/*Toast.makeText(getApplicationContext(), msg.getData().getString(TOAST),
Toast.LENGTH_SHORT).show();*/
showSnackBar(msg.getData().getString(TOAST));
break;
}
}
};
/**
 * Handles the three child-activity results: device picked from the Bluetooth
 * list (connect to it), Bluetooth-enable prompt (set up the service or bail
 * out), and camera capture (encode the thumbnail as Base64 and show it in
 * the round photo slot).
 */
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (D) Log.d(TAG, "onActivityResult " + resultCode);
switch (requestCode) {
case REQUEST_CONNECT_DEVICE:
// When DeviceListActivity returns with a device to connect
if (resultCode == Activity.RESULT_OK) {
// Get the device MAC address
String address = data.getExtras()
.getString(BluetoothActivity.EXTRA_DEVICE_ADDRESS);
// Get the BLuetoothDevice object
BluetoothDevice device = mBluetoothAdapter.getRemoteDevice(address);
// Attempt to connect to the device
mBluetoothService.connect(device);
}
break;
case REQUEST_ENABLE_BT:
// When the request to enable Bluetooth returns
if (resultCode == Activity.RESULT_OK) {
// Bluetooth is now enabled, so set up a chat session
blueToothSetup();
} else {
// User did not enable Bluetooth or an error occured
Log.d(TAG, "BT not enabled");
/*Toast.makeText(this, R.string.bt_not_enabled_leaving, Toast.LENGTH_SHORT).show();*/
showSnackBar(getResources().getString(R.string.bt_not_enabled));
finish();
}
break;
case REQUEST_CAMERA:
if (resultCode == Activity.RESULT_OK) {
// "data" extra is the low-resolution camera thumbnail Bitmap.
Bitmap bmp = (Bitmap) data.getExtras().get("data");
ByteArrayOutputStream stream = new ByteArrayOutputStream();
bmp.compress(Bitmap.CompressFormat.JPEG, 100, stream);
byte[] img = stream.toByteArray();
dataImage = Base64.encodeToString(img, Base64.DEFAULT);
//Toast.makeText(this, dataImage, Toast.LENGTH_SHORT).show();
Drawable roundedImage = new RoundImage(bmp);
RelativeLayout relativeLayout = (RelativeLayout) findViewById(R.id.enroll_relative_layout);
relativeLayout.setBackgroundResource(R.drawable.white_circle);
enrollImage.setScaleType(ImageView.ScaleType.FIT_XY);
enrollImage.setImageDrawable(roundedImage);
isCaptureImage = true;
} else {
Log.d(TAG, "Image not captured");
showSnackBar("Image not captured");
}
break;
}
}
/**
 * Inflates the toolbar menu, grabs the Bluetooth item so its icon can track
 * connection state, and kicks off Bluetooth enablement once the menu exists.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.dash_board_menu, menu);
bluetoothMenu = menu.findItem(R.id.bluetooth);
bluetoothEnable();
return true;
}
/**
 * Handles toolbar item selection. The Bluetooth item opens the device
 * picker and is reported as consumed; everything else is delegated to the
 * superclass (which handles Home/Up).
 * Fix: the original returned true for unhandled items and routed the
 * handled item through super, inverting the Android convention — unhandled
 * items never reached the framework's default handling.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
        case R.id.bluetooth:
            bluetoothConnect();
            return true;
        default:
            return super.onOptionsItemSelected(item);
    }
}
/**
 * Swaps the toolbar Bluetooth icon: 0 = disabled/disconnected,
 * 1 = connecting, 2 = connected.
 */
private void updateBluetoothStatus(int status) {
switch (status) {
case 0:
bluetoothMenu.setIcon(getResources().getDrawable(R.drawable.ic_bluetooth_disabled));
break;
case 1:
bluetoothMenu.setIcon(getResources().getDrawable(R.drawable.ic_bluetooth_connecting));
break;
case 2:
bluetoothMenu.setIcon(getResources().getDrawable(R.drawable.ic_bluetooth_connected));
break;
}
}
/** Opens the device picker; result handled under REQUEST_CONNECT_DEVICE. */
private void bluetoothConnect() {
// Launch the BluetoothActivity to see devices and do scan
Intent serverIntent = new Intent(this, BluetoothActivity.class);
startActivityForResult(serverIntent, REQUEST_CONNECT_DEVICE);
}
/** Launches the system camera; result handled under REQUEST_CAMERA. */
private void captureImage() {
Intent cameraIntent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(cameraIntent, REQUEST_CAMERA);
}
/**
 * Validates that the field contains non-whitespace text. On failure an
 * inline error is shown and the field is focused.
 *
 * @return true when the trimmed field text is non-empty
 */
private boolean validateInput(EditText EdTxt, TextInputLayout inputLayout) {
    boolean hasText = !EdTxt.getText().toString().trim().isEmpty();
    if (hasText) {
        inputLayout.setErrorEnabled(false);
        return true;
    }
    inputLayout.setError(getString(R.string.err_msg_input));
    requestFocus(EdTxt);
    return false;
}
/** Focuses the view and forces the soft keyboard to show if focus is taken. */
private void requestFocus(View view) {
if (view.requestFocus()) {
getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE);
}
}
/**
 * Validates the form and the three captured artifacts (photo, left and
 * right thumb templates), persists the enrollment locally via
 * EnrollDataSource, then uploads it to the backend.
 * Fix: the original built an identical JSON payload twice — once assigned
 * to {@code parameters} and once inline in the submitPayload(...) call; the
 * payload is now built once and reused.
 */
private void enroll() {
    if (!validateInput(inputFirstName, inputLayoutFirstName)) {
        return;
    }
    if (!validateInput(inputLastName, inputLayoutLastName)) {
        return;
    }
    if (!isCaptureImage) {
        showSnackBar("Image not Captured.");
        return;
    }
    if (!isEnrollLeftThumb) {
        showSnackBar("Left Thumb not Enrolled");
        return;
    }
    if (!isEnrollRightThumb) {
        showSnackBar("Right Thumb not Enrolled");
        return;
    }
    EnrollItem item = new EnrollItem();
    item.setImage(dataImage);
    item.setFirstName(inputFirstName.getText().toString().trim());
    item.setLastName(inputLastName.getText().toString().trim());
    item.setLeftThumb(dataLeftThumb);
    item.setRightThumb(dataRightThumb);
    item.setIsSync(false);
    // Switch the button for a spinner while the upload is in flight.
    progressBar.setVisibility(View.VISIBLE);
    enrollButton.setVisibility(View.GONE);
    enrollDataSource.open();
    enrollDataSource.createEnroll(item);
    enrollDataSource.close();
    parameters = payload(dataImage, inputFirstName.getText().toString().trim(),
            inputLastName.getText().toString().trim(), dataLeftThumb, dataRightThumb);
    submitPayload(parameters);
}
/**
 * Assembles the enrollment upload body: Base64 photo, names, and the two
 * Base64 fingerprint templates. JSONException is logged and an incomplete
 * object is returned in that case.
 */
private JSONObject payload(String image, String firstName, String lastName, String leftThumb, String rightThumb) {
JSONObject obj = new JSONObject();
try {
obj.put("image", image);
obj.put("firstName", firstName);
obj.put("lastName", lastName);
obj.put("leftThumb", leftThumb);
obj.put("rightThumb", rightThumb);
} catch (JSONException e) {
e.printStackTrace();
}
return obj;
}
/**
 * Uploads the enrollment payload via the Retrofit API asynchronously.
 * NOTE(review): onResponse is empty — the UI spinner started in enroll() is
 * never dismissed on success/failure of this path, and HTTP errors are only
 * logged; confirm whether the AsyncTask variant below was meant to replace
 * this.
 */
private void submitPayload(JSONObject payLoad) {
ApiInterface apiService =
ApiClient.getClient().create(ApiInterface.class);
Call<EnrollItem> call = apiService.enroll(payLoad);
call.enqueue(new Callback<EnrollItem>() {
@Override
public void onResponse(Call<EnrollItem>call, Response<EnrollItem> response) {
//response.body().;
}
@Override
public void onFailure(Call<EnrollItem>call, Throwable t) {
// Log error here since request failed
Log.e(TAG, t.toString());
}
});
/*Observable<EnrollItem> call = apiService.enroll(payLoad);
Subscription subscription = call
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Subscriber<EnrollItem>() {
@Override
public void onCompleted() {
}
@Override
public void onError(Throwable e) {
// cast to retrofit.HttpException to get the response code
if (e instanceof HttpException) {
HttpException response = (HttpException) e;
int code = response.code();
}
}
@Override
public void onNext (EnrollItem item){
}
});*/
}
/**
 * Alternative uploader: POSTs the {@code parameters} JSON to the enroll
 * endpoint over raw HttpURLConnection and restores the button/spinner in
 * onPostExecute.
 * NOTE(review): the class name shadows the submitPayload(JSONObject) method
 * and violates Java naming conventions; also showSnackBar is invoked from
 * doInBackground, i.e. off the UI thread — confirm this task is actually
 * instantiated anywhere before relying on it.
 */
private class submitPayload extends AsyncTask<Void, Void, String> {
@Override
protected String doInBackground(Void... params) {
HttpURLConnection urlConnection = null;
DataOutputStream wr = null;
InputStream is;
try {
URL url = new URL(AppConfig.ENROLL);
urlConnection = (HttpURLConnection) url.openConnection();
// Serialize the shared payload built by enroll().
String data = parameters.toString();
urlConnection.setRequestMethod("POST");
urlConnection.setDoOutput(true);
urlConnection.setDoInput(true);
urlConnection.setUseCaches(false);
urlConnection.setConnectTimeout(15000);
urlConnection.setReadTimeout(30000);
urlConnection.setRequestProperty("Content-Type", "application/json");
urlConnection.setRequestProperty("Authorization", AppConfig.AUTHORIZATION);
urlConnection.setFixedLengthStreamingMode(data.getBytes().length);
urlConnection.connect();
wr = new DataOutputStream(urlConnection.getOutputStream());
wr.writeBytes(data);
wr.flush();
is = urlConnection.getInputStream();
StringBuffer buffer = new StringBuffer();
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
String line;
while ((line = reader.readLine()) != null) {
// Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
// But it does make debugging a *lot* easier if you print out the completed
// buffer for debugging.
buffer.append(line + "\n");
}
return buffer.toString();
} catch (IOException e) {
Log.e(TAG, "Error ", e);
// If the code didn't successfully get the data, there's no point in attempting
// to parse it.
showSnackBar(getString(R.string.err_network_timeout));
/*try {
SyncItem item = new SyncItem();
item.setEnumerationCode(parameters.getString("enumerationCode"));
item.setLongitude(parameters.getDouble("longitude"));
item.setLatitude(parameters.getDouble("latitude"));
item.setPicture(parameters.getString("picture"));
item.setOfficerCode(parameters.getString("officerCode"));
item.setClientCode(parameters.getString("clientCode"));
syncDataSource.createSync(item);
}catch (JSONException jEx) {
Log.d(TAG, "Parse Json Error: " + jEx.getMessage());
}*/
return null;
} finally {
if (urlConnection != null) {
urlConnection.disconnect();
}
if (wr != null) {
try {
wr.close();
} catch (final IOException e) {
Log.e(TAG, "Error closing stream", e);
}
}
}
}
// Runs on the UI thread: parse the response (result currently unused) and
// restore the submit button.
protected void onPostExecute(String s) {
super.onPostExecute(s);
if (s != null) {
try {
JSONObject responseObj = new JSONObject(s);
// Parsing json object response
// response will be a json object
JSONObject response = responseObj.getJSONObject("response");
} catch (JSONException e) {
e.printStackTrace();
}
}
enrollButton.setVisibility(View.VISIBLE);
progressBar.setVisibility(View.GONE);
}
}
/**
 * Clears a field's inline validation error as soon as the user edits it;
 * one instance is attached per input field in onCreate.
 */
private class MyTextWatcher implements TextWatcher {
// The field this watcher is attached to, used to pick the matching layout.
private View view;
private MyTextWatcher(View view) {
this.view = view;
}
public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
}
public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
}
public void afterTextChanged(Editable editable) {
switch (view.getId()) {
case R.id.input_enroll_first_name:
inputLayoutFirstName.setErrorEnabled(false);
//validatePhoneNumber(inputLoginPhone, inputLayoutLoginPhone);
break;
case R.id.input_enroll_last_name:
inputLayoutLastName.setErrorEnabled(false);
//validatePin(inputLoginPin, inputLayoutLoginPin);
break;
}
}
}
// Shows a transient snackbar anchored to this screen's coordinator layout,
// with the message text forced to white.
private void showSnackBar(String msg) {
CoordinatorLayout coordinatorLayout = (CoordinatorLayout) findViewById(R.id.enroll_coordinator_layout);
Snackbar snackbar = Snackbar
.make(coordinatorLayout, msg, Snackbar.LENGTH_LONG);
// Changing message text color
snackbar.setActionTextColor(Color.RED);
// Changing action button text color
View sbView = snackbar.getView();
TextView textView = (TextView) sbView.findViewById(android.support.design.R.id.snackbar_text);
textView.setTextColor(Color.WHITE);
snackbar.show();
}
}
|
mikolajgucki/ae-engine | engine/common/util/src/cpp/string_util.cpp | <reponame>mikolajgucki/ae-engine
#include <cstdlib>
#include <sstream>
#include "string_util.h"
using namespace std;
namespace ae {
namespace util {
/** Returns true when `str` begins with `prefix` (an empty prefix matches). */
bool startsWith(const std::string& str,const std::string& prefix) {
    if (str.length() < prefix.length()) {
        return false;
    }
    return str.compare(0,prefix.length(),prefix) == 0;
}
/**
 * Appends to `items` the substrings of `str` delimited by `delim`.
 * Follows std::getline semantics: a trailing delimiter does not produce a
 * final empty token.
 */
static void split(const std::string &str,char delim,std::vector<std::string> &items) {
    std::stringstream tokens(str);
    std::string token;
    while (std::getline(tokens,token,delim)) {
        items.push_back(token);
    }
}
/**
 * Returns the substrings of `str` delimited by `delim`, delegating to the
 * output-parameter overload above (so a trailing delimiter yields no empty
 * final token).
 */
vector<string> split(const string &str,char delim) {
vector<string> items;
split(str,delim,items);
return items;
}
/**
 * Removes every leading character of `str` that appears in `characters`.
 * When the whole string consists of such characters, find_first_not_of
 * returns npos and erase(0, npos) clears the string — same as the original.
 */
void ltrim(std::string &str,const std::string &characters) {
    str.erase(0,str.find_first_not_of(characters));
}
/**
 * Returns true when `str` is a base-10 integer literal: an optional leading
 * '-' followed by at least one decimal digit.
 * Fixes: the original threw std::out_of_range on an empty string
 * (str.at(0)) and accepted the lone string "-" as an integer.
 */
bool isInt(const std::string &str) {
    if (str.empty()) {
        return false;
    }
    std::string::size_type index = 0;
    // optional sign
    if (str[0] == '-') {
        index++;
    }
    // a sign alone is not a number
    if (index == str.length()) {
        return false;
    }
    // ciphers
    for (; index < str.length(); index++) {
        char ch = str[index];
        if (ch < '0' || ch > '9') {
            return false;
        }
    }
    return true;
}
/**
 * Parses `str` as a base-10 int into `value`. Returns false, leaving
 * `value` untouched, when `str` is not an integer literal per isInt().
 * NOTE(review): strtol saturates at LONG_MIN/LONG_MAX on overflow and the
 * long->int narrowing is unchecked — confirm callers never pass values
 * outside int range.
 */
bool parseInt(const std::string &str,int &value) {
if (isInt(str) == false) {
return false;
}
value = (int)strtol(str.c_str(),(char **)0,10);
return true;
}
} // namespace
} // namespace |
jingfei/FirstTry | node_modules/mapshaper/src/gis/mapshaper-split.js | <reponame>jingfei/FirstTry
/* @requires mapshaper-common */
// Splits every layer in `layers` on the values of attribute `field` and
// returns a single flat array of the resulting per-value layers.
// (`arcs` is passed through to splitOnField unchanged.)
MapShaper.splitLayersOnField = function(layers, arcs, field) {
  var splitLayers = [];
  Utils.forEach(layers, function(lyr) {
    splitLayers = splitLayers.concat(MapShaper.splitOnField(lyr, arcs, field));
  });
  return splitLayers;
};
// Partitions one layer into a layer per distinct value of `field`.
// Each output layer keeps the shapes/records whose `field` value matches,
// is named after that value (stringified; numbers become strings so they
// can serve as layer names), and inherits lyr0's other properties.
// Throws via error() when the layer has no data table or the field is
// missing.
MapShaper.splitOnField = function(lyr0, arcs, field) {
  var dataTable = lyr0.data;
  if (!dataTable) error("[splitOnField] Missing a data table");
  if (!dataTable.fieldExists(field)) error("[splitOnField] Missing field:", field);
  // index maps field value -> position in splitLayers
  var index = {},
      properties = dataTable.getRecords(),
      shapes = lyr0.shapes,
      splitLayers = [];
  Utils.forEach(shapes, function(shp, i) {
    var rec = properties[i],
        key = String(rec[field]), // convert numbers to strings (for layer naming)
        lyr, idx;
    if (key in index === false) {
      // First record with this value: start a new output layer.
      idx = splitLayers.length;
      index[key] = idx;
      splitLayers.push({
        name: key || Utils.getUniqueName("layer"),
        properties: [],
        shapes: []
      });
    } else {
      idx = index[key];
    }
    lyr = splitLayers[idx];
    lyr.shapes.push(shapes[i]);
    lyr.properties.push(properties[i]);
  });
  // Re-wrap each bucket as a full layer object based on lyr0.
  return Utils.map(splitLayers, function(obj) {
    return Opts.copyNewParams({
      name: obj.name,
      shapes: obj.shapes,
      data: new DataTable(obj.properties)
    }, lyr0);
  });
};
|
ajchdev/outside-event | node_modules/@wordpress/components/src/slot-fill/slot.js | <filename>node_modules/@wordpress/components/src/slot-fill/slot.js
/**
* External dependencies
*/
import { isFunction, isString, map, negate } from 'lodash';
/**
* WordPress dependencies
*/
import {
Children,
Component,
cloneElement,
isEmptyElement,
} from '@wordpress/element';
/**
* Internal dependencies
*/
import { Consumer } from './context';
/**
 * Renders the children of every <Fill> registered under this slot's name.
 * Registers itself with the SlotFill context on mount, re-registers when the
 * `name` prop changes, and unregisters on unmount; `isUnmounted` guards the
 * context-triggered forceUpdate from firing after unmount.
 */
class SlotComponent extends Component {
	constructor() {
		super( ...arguments );

		this.isUnmounted = false;
		this.bindNode = this.bindNode.bind( this );
	}

	componentDidMount() {
		const { registerSlot } = this.props;

		registerSlot( this.props.name, this );
	}

	componentWillUnmount() {
		const { unregisterSlot } = this.props;

		this.isUnmounted = true;
		unregisterSlot( this.props.name, this );
	}

	componentDidUpdate( prevProps ) {
		const { name, unregisterSlot, registerSlot } = this.props;

		// Moving to a new slot name means re-registering under that name.
		if ( prevProps.name !== name ) {
			unregisterSlot( prevProps.name );
			registerSlot( name, this );
		}
	}

	bindNode( node ) {
		this.node = node;
	}

	forceUpdate() {
		// The context may still hold a reference briefly after unmount.
		if ( this.isUnmounted ) {
			return;
		}
		super.forceUpdate();
	}

	render() {
		const { children, name, fillProps = {}, getFills } = this.props;

		const fills = map( getFills( name, this ), ( fill ) => {
			const fillKey = fill.occurrence;
			// Function-as-children fills receive the slot's fillProps.
			const fillChildren = isFunction( fill.children )
				? fill.children( fillProps )
				: fill.children;

			return Children.map( fillChildren, ( child, childIndex ) => {
				if ( ! child || isString( child ) ) {
					return child;
				}

				// Namespace each child's key by its fill occurrence so keys
				// stay unique across multiple fills.
				const childKey = `${ fillKey }---${ child.key || childIndex }`;
				return cloneElement( child, { key: childKey } );
			} );
		} ).filter(
			// In some cases fills are rendered only when some conditions apply.
			// This ensures that we only use non-empty fills when rendering, i.e.,
			// it allows us to render wrappers only when the fills are actually present.
			negate( isEmptyElement )
		);

		// Render-prop children decide how the fills are laid out.
		return <>{ isFunction( children ) ? children( fills ) : fills }</>;
	}
}
// Public <Slot> wrapper: injects the SlotFill context's registration
// callbacks and fill accessor into the stateful SlotComponent above.
const Slot = ( props ) => (
	<Consumer>
		{ ( { registerSlot, unregisterSlot, getFills } ) => (
			<SlotComponent
				{ ...props }
				registerSlot={ registerSlot }
				unregisterSlot={ unregisterSlot }
				getFills={ getFills }
			/>
		) }
	</Consumer>
);
|
magdel/MapNav | src/main/java/misc/FileDialog.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package misc;
import RPMap.MapCanvas;
import java.io.InputStream;
import java.io.OutputStream;
import javax.microedition.io.*;
import javax.microedition.io.file.FileConnection;
import javax.microedition.lcdui.*;
import lang.Lang;
import lang.LangHolder;
/**
*
* @author rfk
*/
/**
 * J2ME modal save/load dialog backed by a singleton. showSaveForm/showLoadForm
 * build a Form with a path field, optional extra Items, and Save/Load, Back
 * and Browse commands; the actual file/HTTP transfer runs on a worker thread
 * (run()) behind a ProgressForm and reports the outcome via MapCanvas alerts.
 */
public class FileDialog implements CommandListener, Runnable, ProgressStoppable {
// Instances are only created through the static show*Form factories.
private FileDialog() {
}
// Singleton instance for the currently displayed dialog (save or load).
private static FileDialog saveDialog;
Form fileForm;
// Callback that serializes/deserializes the payload and reports progress.
ProgressReadWritable pRW;
// Screen to return to when the dialog is dismissed.
Displayable backDisp;
Command mainCommand,
backCommand=new Command(LangHolder.getString(Lang.back), Command.BACK, 5),
browseCommand=new Command(LangHolder.getString(Lang.browse), Command.ITEM, 1);
// Extra form items supplied by the caller and passed back to pRW.
Item[] items;
TextField textFileName;
String fileExt, name;
// Dialog modes.
private static final byte DLG_SAVE=1;
private static final byte DLG_LOAD=2;
private byte dlgType;
// Builds the form for the requested mode and makes it the current screen.
private static void createAndShow(byte dlgType) {
saveDialog.dlgType=dlgType;
if (dlgType==DLG_SAVE){
saveDialog.fileForm=new Form(LangHolder.getString(Lang.save)+":"+saveDialog.name);
saveDialog.mainCommand=new Command(LangHolder.getString(Lang.save), Command.ITEM, 2);
saveDialog.textFileName=new TextField(LangHolder.getString(Lang.sendaddrfile)+"\n", null, 128, TextField.ANY);
} else if (dlgType==DLG_LOAD){
saveDialog.fileForm=new Form(LangHolder.getString(Lang.load)+":"+saveDialog.name);
saveDialog.mainCommand=new Command(LangHolder.getString(Lang.load), Command.ITEM, 2);
saveDialog.textFileName=new TextField(LangHolder.getString(Lang.urlfile)+"\n", null, 128, TextField.ANY);
}
saveDialog.fileForm.append(saveDialog.textFileName);
for (int i=0; i<saveDialog.items.length; i++) {
saveDialog.fileForm.append(saveDialog.items[i]);
}
saveDialog.fileForm.addCommand(saveDialog.mainCommand);
saveDialog.fileForm.addCommand(saveDialog.backCommand);
saveDialog.fileForm.addCommand(saveDialog.browseCommand);
saveDialog.fileForm.setCommandListener(saveDialog);
MapCanvas.setCurrent(saveDialog.fileForm);
}
// Opens the dialog in save mode; `fileExt` is appended to the chosen name.
public static void showSaveForm(String name, Item[] items, ProgressReadWritable pRW, Displayable backDisp, String fileExt) {
saveDialog=new FileDialog();
saveDialog.fileExt=fileExt;
saveDialog.name=name;
saveDialog.pRW=pRW;
saveDialog.backDisp=backDisp;
saveDialog.items=items;
createAndShow(DLG_SAVE);
}
// Opens the dialog in load mode; `fileExt` filters the file browser.
public static void showLoadForm(String name, Item[] items, ProgressReadWritable pRW, Displayable backDisp, String fileExt) {
saveDialog=new FileDialog();
saveDialog.fileExt=fileExt;
saveDialog.name=name;
saveDialog.pRW=pRW;
saveDialog.backDisp=backDisp;
saveDialog.items=items;
createAndShow(DLG_LOAD);
}
// Main = start the transfer behind a ProgressForm; Back = dismiss;
// Browse = open the directory (save) or file (load) browser.
public void commandAction(Command command, Displayable displayable) {
if (displayable==fileForm){
if (command==mainCommand){
//saveDialog=null;
//MapCanvas.setCurrent(backDisp);
MapCanvas.setCurrent(new ProgressForm((dlgType==DLG_LOAD)?LangHolder.getString(Lang.loading):LangHolder.getString(Lang.saving), name, this, fileForm));
(new Thread(this)).start();
} else if (command==backCommand){
saveDialog=null;
MapCanvas.setCurrent(backDisp);
} else if (command==browseCommand){
if (dlgType==DLG_SAVE){
MapCanvas.setCurrent(new BrowseList(fileForm, textFileName, BrowseList.DIRBROWSE, null, null));
} else {
MapCanvas.setCurrent(new BrowseList(fileForm, textFileName, BrowseList.FILEBROWSE, fileExt, null));
}
}
}
}
/**
 * Worker thread: performs the transfer. Save mode appends "_"+fileExt to
 * the name, refuses to overwrite, creates the file and delegates writing to
 * pRW; load mode streams the source into pRW. Paths starting with http://
 * open an HTTP connection, everything else a file:/// connection.
 * NOTE(review): the save branch unconditionally casts to FileConnection —
 * an http:// target entered in save mode would throw ClassCastException;
 * presumably save is only used with local paths — confirm.
 */
public void run() {
String fn=textFileName.getString();
if (dlgType==DLG_SAVE) {
if (fn.endsWith("_")) {
fn+=fileExt;
} else {
fn+="_"+fileExt;
}
}
try {
StreamConnection fc;
if (fn.startsWith("http://")){
fc=(StreamConnection) Connector.open(fn);
} else {
fc=(StreamConnection) Connector.open("file:///"+fn);
}
try {
if (dlgType==DLG_SAVE){
if (((FileConnection) fc).exists()){
throw new Exception("File already exists!");
}
((FileConnection) fc).create();
OutputStream os=fc.openOutputStream();
try {
pRW.writeData(os, items);
} finally {
os.close();
}
} else if (dlgType==DLG_LOAD){
InputStream is=fc.openInputStream();
try {
pRW.readData(is, items);
} finally {
is.close();
}
}
} finally {
fc.close();
}
MapCanvas.showmsg("OK", (dlgType==DLG_SAVE)?LangHolder.getString(Lang.saved)+" "+fn:LangHolder.getString(Lang.loaded)+" "+fn,
AlertType.INFO, backDisp);
} catch (Throwable t) {
MapCanvas.showmsgmodal("Error", t.toString()+" "+fn, AlertType.ERROR, fileForm);
}
}
// ProgressStoppable: forward progress reporting / cancellation to pRW.
public void setProgressResponse(ProgressResponse progressResponse) {
pRW.setProgressResponse(progressResponse);
}
public boolean stopIt() {
pRW.stopIt();
return true;
}
}
|
HarrisonKeeling/amazon-kinesis-video-streams-webrtc-sdk-js-with-amazon-cognito | node_modules/@trust/keyto/test/keys/ECDSA/P256.js | 'use strict'
/**
* PEM
* @ignore
*/
const privatePKCS1 = `-----<KEY>`
const privatePKCS8 = `-----BEGIN PRIVATE KEY-----
<KEY>
-----END PRIVATE KEY-----`
const publicPKCS8 = `-----BEGIN PUBLIC KEY-----
<KEY>
-----END PUBLIC KEY-----`
/**
* JWK
* @ignore
*/
const privateJwk = `{
"kty": "EC",
"crv": "P-256",
"d": "<KEY>",
"x": "<KEY>",
"y": "<KEY>"
}`
const publicJwk = `{
"kty": "EC",
"crv": "P-256",
"x": "<KEY>",
"y": "<KEY>"
}`
/**
* Export
* @ignore
*/
module.exports = {
privatePKCS1,
privatePKCS8,
publicPKCS8,
privateJwk,
publicJwk,
}
|
clauderichard/OptimistRacing | doc/Code Documentation/html/class_skybox.js | <gh_stars>0
// Doxygen-generated navigation data for class Skybox: [label, anchor URL,
// children] triples for the constructor and destructor entries.
// Do not edit by hand — regenerate with Doxygen instead.
var class_skybox =
[
  [ "Skybox", "class_skybox.html#afc61cfd31346170303119c316a2eb7f4", null ],
  [ "~Skybox", "class_skybox.html#a62ad4c6b4b1965a0a6d8536a50d4c090", null ]
];
Infoss/conf-profile-4-android | ConfProfile/jni/strongswan/src/libstrongswan/plugins/mysql/mysql_plugin.c | <reponame>Infoss/conf-profile-4-android<gh_stars>1-10
/*
* Copyright (C) 2008 <NAME>
* Hochschule fuer Technik Rapperswil
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
#include "mysql_plugin.h"
#include <library.h>
#include <utils/debug.h>
#include "mysql_database.h"
typedef struct private_mysql_plugin_t private_mysql_plugin_t;

/**
 * Private data of mysql_plugin. Holds nothing beyond the public
 * interface; the plugin is stateless.
 */
struct private_mysql_plugin_t {

	/**
	 * public functions
	 */
	mysql_plugin_t public;
};

/* Plugin name, as reported to the strongSwan plugin loader. */
METHOD(plugin_t, get_name, char*,
	private_mysql_plugin_t *this)
{
	return "mysql";
}
/* Advertise this plugin's features: a DATABASE backend for DB_MYSQL
 * URIs, constructed through mysql_database_create(). */
METHOD(plugin_t, get_features, int,
	private_mysql_plugin_t *this, plugin_feature_t *features[])
{
	/* static: the feature table must outlive this call */
	static plugin_feature_t f[] = {
		PLUGIN_REGISTER(DATABASE, mysql_database_create),
		PLUGIN_PROVIDE(DATABASE, DB_MYSQL),
	};
	*features = f;
	return countof(f);
}
/* Destructor: release the MySQL client library state first, then free
 * this plugin instance. */
METHOD(plugin_t, destroy, void,
	private_mysql_plugin_t *this)
{
	mysql_database_deinit();
	free(this);
}
/*
 * Described in header.
 *
 * Initializes the MySQL client library before constructing the plugin;
 * returns NULL (so the plugin load fails cleanly) when that
 * initialization fails.
 */
plugin_t *mysql_plugin_create()
{
	private_mysql_plugin_t *this;

	if (!mysql_database_init())
	{
		DBG1(DBG_LIB, "MySQL client library initialization failed");
		return NULL;
	}

	/* wire the plugin_t callbacks defined above */
	INIT(this,
		.public = {
			.plugin = {
				.get_name = _get_name,
				.get_features = _get_features,
				.destroy = _destroy,
			},
		},
	);

	return &this->public.plugin;
}
|
pebble2015/cpoi | src/org/apache/poi/ss/formula/functions/BooleanFunction_5.cpp | // Generated from /POI/java/org/apache/poi/ss/formula/functions/BooleanFunction.java
#include <org/apache/poi/ss/formula/functions/BooleanFunction_5.hpp>
#include <java/lang/Boolean.hpp>
#include <java/lang/NullPointerException.hpp>
#include <org/apache/poi/ss/formula/eval/BoolEval.hpp>
#include <org/apache/poi/ss/formula/eval/ErrorEval.hpp>
#include <org/apache/poi/ss/formula/eval/EvaluationException.hpp>
#include <org/apache/poi/ss/formula/eval/OperandResolver.hpp>
#include <org/apache/poi/ss/formula/eval/ValueEval.hpp>
// Forward declaration for the transpiler's typed Java-array template.
template<typename ComponentType, typename... Bases> struct SubArray;

namespace poi
{
namespace ss
{
namespace formula
{
namespace eval
{
// Alias for a Java-style ValueEval[] as emitted by the transpiler.
typedef ::SubArray< ::poi::ss::formula::eval::ValueEval, ::java::lang::ObjectArray > ValueEvalArray;
} // eval
} // formula
} // ss
} // poi

// Null-pointer guard: mirrors the JVM's implicit null check by throwing
// java::lang::NullPointerException before any dereference.
template<typename T>
static T* npc(T* t)
{
    if(!t) throw new ::java::lang::NullPointerException();
    return t;
}

// Transpiler two-phase construction: run static initialization (clinit),
// then the translated constructor body (ctor).
poi::ss::formula::functions::BooleanFunction_5::BooleanFunction_5()
    : super(*static_cast< ::default_init_tag* >(0))
{
    clinit();
    ctor();
}

// Single-argument evaluate: coerce the operand to a boolean (a null
// Boolean, e.g. a blank cell, counts as false) and return its negation —
// this anonymous subclass appears to implement the NOT() worksheet
// function. Evaluation errors map to the corresponding ErrorEval.
poi::ss::formula::eval::ValueEval* poi::ss::formula::functions::BooleanFunction_5::evaluate(int32_t srcRowIndex, int32_t srcColumnIndex, ::poi::ss::formula::eval::ValueEval* arg0)
{
    bool boolArgVal;
    try {
        auto ve = ::poi::ss::formula::eval::OperandResolver::getSingleValue(arg0, srcRowIndex, srcColumnIndex);
        auto b = ::poi::ss::formula::eval::OperandResolver::coerceValueToBoolean(ve, false);
        boolArgVal = b == nullptr ? false : npc(b)->booleanValue();
    } catch (::poi::ss::formula::eval::EvaluationException* e) {
        return npc(e)->getErrorEval();
    }
    return ::poi::ss::formula::eval::BoolEval::valueOf(!boolArgVal);
}

// Reflection support generated by the transpiler.
extern java::lang::Class *class_(const char16_t *c, int n);
java::lang::Class* poi::ss::formula::functions::BooleanFunction_5::class_()
{
    static ::java::lang::Class* c = ::class_(u"", 0);
    return c;
}

// Array-form evaluate simply delegates to the base-class implementation.
poi::ss::formula::eval::ValueEval* poi::ss::formula::functions::BooleanFunction_5::evaluate(::poi::ss::formula::eval::ValueEvalArray* args, int32_t srcRowIndex, int32_t srcColumnIndex)
{
    return super::evaluate(args, srcRowIndex, srcColumnIndex);
}

java::lang::Class* poi::ss::formula::functions::BooleanFunction_5::getClass0()
{
    return class_();
}
|
mengxiangrui007/ssh_easyui | dataCollection/src/com/data/dao/item/impl/ItemDaoImpl.java | <reponame>mengxiangrui007/ssh_easyui
package com.data.dao.item.impl;
import java.util.List;
import org.hibernate.Query;
import org.hibernate.SQLQuery;
import org.hibernate.transform.Transformers;
import org.springframework.stereotype.Repository;
import com.data.dao.base.impl.BaseDaoImpl;
import com.data.model.Item;
/**
 * DAO for survey items ({@code Item}): special-purpose queries beyond the
 * generic CRUD inherited from {@code BaseDaoImpl}.
 *
 * @author Meng Xiangrui (Chifeng Hongwei Network Technology Co., Ltd.)
 */
@Repository("itemDaoImpl")
public class ItemDaoImpl extends BaseDaoImpl<Item, Integer> {

    /**
     * Marks a survey item as having questions created for it, via a native
     * UPDATE setting ISHAVA_QUESTION to 1 for the item's ID.
     * (The {@code sql} field is inherited from BaseDaoImpl.)
     *
     * @param item survey item whose flag to set; only its ID is used
     * @return number of rows updated (0 or 1)
     * @throws Exception propagated from Hibernate
     */
    public int updateItemIsHaveQuestion(Item item) throws Exception {
        sql = "UPDATE item SET ISHAVA_QUESTION=1 WHERE (ID=?)";
        Query query = this.getSessionFactory().getCurrentSession()
                .createSQLQuery(sql);
        query.setParameter(0, item.getId());
        return query.executeUpdate();
    }

    /**
     * Fetches survey items that already have questions, restricted to the
     * given departments. Only id, name and description are populated on
     * the returned Item beans (via scalar mapping + alias transformer).
     *
     * @param departmentIdList department IDs to search within; must be
     *                         non-empty (an empty IN list fails in SQL)
     * @return matching items
     * @throws Exception propagated from Hibernate
     */
    @SuppressWarnings("unchecked")
    public List<Item> queryHaveQuestionItem(List<Integer> departmentIdList)
            throws Exception {
        sql = "select t1.id as id,t1.name as name,t1.DESCRIPTION as description from item t1 where t1.ISHAVA_QUESTION = 1 and t1.DEPARTMENT_ID in(:departmentIdList)";
        SQLQuery query = this.getSessionFactory().getCurrentSession()
                .createSQLQuery(sql);
        query.addScalar("id", new org.hibernate.type.IntegerType());
        query.addScalar("name", new org.hibernate.type.StringType());
        query.addScalar("description", new org.hibernate.type.StringType());
        query.setParameterList("departmentIdList", departmentIdList);
        return query.setResultTransformer(Transformers.aliasToBean(Item.class))
                .list();
    }
}
|
Daz2345/race | priceRanger_scenarioRun/lib/server/index.js | <reponame>Daz2345/race
Meteor.startup(function () {
  // Ensure the same indexes as before, declared once and applied in order.
  [
    {"createdAt": 1},
    {"createdBy": 1},
    {"name": 1},
    {"userId": 1},
    {"scenarioId": 1},
    {"createdAt": 1, "scenarioId": 1}
  ].forEach(function (keys) {
    ScenarioRuns._ensureIndex(keys);
  });
});
|
FelixTheC/python_morsels | interleave.py | <filename>interleave.py<gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@created: 01.01.20
@author: felix
"""
from itertools import zip_longest
def interleave(*args):
    """Lazily yield items from the given iterables in round-robin order.

    Exhausted iterables are skipped, so trailing items of longer inputs
    still appear: interleave([1, 2, 3], [4]) -> 1, 4, 2, 3.

    Improvement over the previous version: the inputs are no longer
    materialized with list() up front — zip_longest consumes them lazily,
    so generators are not copied into memory.
    """
    # Unique sentinel marks "this iterable is exhausted" slots; filtering
    # on identity keeps falsy items (0, None, "") intact.
    sentinel = object()
    zipped = zip_longest(*args, fillvalue=sentinel)
    return (item for group in zipped for item in group if item is not sentinel)
if __name__ == '__main__':
    # Small demo: interleave a list with a generator of its squares.
    numbers = [1, 2, 3, 4]
    squares = (value ** 2 for value in numbers)
    print(list(interleave(numbers, squares)))
|
yongquanf/LocalitySensitiveSketch | src/util/bloom/Cuckoo/Example.java | package util.bloom.Cuckoo;
import com.google.common.hash.Funnels;
import util.bloom.Cuckoo.Utils.Algorithm;
/**
 * Minimal end-to-end demo of the {@code CuckooFilter} API: build, insert,
 * membership test, counting, load factor, and delete.
 */
public class Example {
	public static void main(String[] args) {
		// create: capacity for 2,000,000 entries, 1% false-positive target,
		// hashed with 128-bit Murmur3
		CuckooFilter<Integer> filter = new CuckooFilter.Builder<>(Funnels.integerFunnel(), 2000000).withFalsePositiveRate(0.01).withHashAlgorithm(Algorithm.Murmur3_128).build();

		// insert: put() reports whether the item could be stored
		if (filter.put(42)) {
			System.out.println("Insert Success!");
		}

		// contains: "might" — false positives are possible by design
		if (filter.mightContain(42)) {
			System.out.println("Found 42!");
		}

		// total number of stored items
		System.out.println("Filter has " + filter.getCount() + " items");

		// approximate per-item count
		System.out.println("42 has been inserted approximately " + filter.approximateCount(42) + " times");

		// % loaded
		System.out.println("Filter is " + String.format("%.0f%%", filter.getLoadFactor() * 100) + " loaded");

		// delete: reports whether the item was found and removed
		if (filter.delete(42)) {
			System.out.println("Delete Success!");
		}
	}
}
|
veita/jaffre | src/main/java/org/jaffre/spi/NoopLogger.java | <reponame>veita/jaffre<gh_stars>0
/*
* (C) Copyright 2008-2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jaffre.spi;
import org.jaffre.Logger;
/**
 * A {@link org.jaffre.Logger} that discards all output (null-object
 * pattern): every {@code is*Enabled()} probe answers <code>false</code>
 * and every logging method is an intentional no-op. Install it to switch
 * logging off entirely without null checks at call sites.
 *
 * @author <NAME>
 */
public class NoopLogger implements Logger
{
	public NoopLogger()
	{
	}

	/**
	 * @return Always <code>false</code>.
	 * @see org.jaffre.Logger#isErrorEnabled()
	 */
	@Override
	public boolean isErrorEnabled()
	{
		return false;
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void error(String p_strMessage, Throwable p_e)
	{
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void error(String p_strMessage)
	{
	}

	/**
	 * @return Always <code>false</code>.
	 * @see org.jaffre.Logger#isWarnEnabled()
	 */
	@Override
	public boolean isWarnEnabled()
	{
		return false;
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void warn(String p_strMessage, Throwable p_e)
	{
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void warn(String p_strMessage)
	{
	}

	/**
	 * @return Always <code>false</code>.
	 * @see org.jaffre.Logger#isInfoEnabled()
	 */
	@Override
	public boolean isInfoEnabled()
	{
		return false;
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void info(String p_strMessage, Throwable p_e)
	{
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void info(String p_strMessage)
	{
	}

	/**
	 * @return Always <code>false</code>.
	 * @see org.jaffre.Logger#isDebugEnabled()
	 */
	@Override
	public boolean isDebugEnabled()
	{
		return false;
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void debug(String p_strMessage, Throwable p_e)
	{
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void debug(String p_strMessage)
	{
	}

	/**
	 * @return Always <code>false</code>.
	 * @see org.jaffre.Logger#isTraceEnabled()
	 */
	@Override
	public boolean isTraceEnabled()
	{
		return false;
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void trace(String p_strMessage, Throwable p_e)
	{
	}

	/**
	 * This method does nothing.
	 */
	@Override
	public void trace(String p_strMessage)
	{
	}
}
|
Pondorasti/teamo | src/api/lobby-template/Platform.js | <gh_stars>1-10
/**
 * Lobby-template option group: a display label plus the list of
 * selectable gaming platforms.
 */
const Platforms = {
  label: "Platforms",
  options: ["PC", "PlayStation", "Xbox", "VR"],
}

export default Platforms
|
pmehta08/MulensModel | examples/use_cases/use_case_15_emcee_simple_PSPL.py | <filename>examples/use_cases/use_case_15_emcee_simple_PSPL.py
"""
Use Case 15: Fit a point lens event with emcee.
"""
import numpy as np
import emcee
import os
import MulensModel as mm
def lnlike(theta, event, parameters_to_fit):
    """Log-likelihood: write theta into the model, then score the fit.

    Returns -0.5 * (chi2 - chi2_0), where chi2_0 is the module-level
    baseline (one per data point).
    """
    for index, name in enumerate(parameters_to_fit):
        setattr(event.model.parameters, name, theta[index])
    return -0.5 * (event.get_chi2() - chi2_0)
def lnprior(theta, parameters_to_fit):
    """Log-prior: flat everywhere except t_E < 0, which is forbidden."""
    t_e_value = theta[parameters_to_fit.index("t_E")]
    return -np.inf if t_e_value < 0. else 0.0
def lnprob(theta, event, parameters_to_fit):
    """Log-posterior: prior plus likelihood, with NaN likelihood rejected."""
    prior = lnprior(theta, parameters_to_fit)
    if not np.isfinite(prior):
        return -np.inf
    ln_like = lnlike(theta, event, parameters_to_fit)
    # Negative source fluxes make the likelihood NaN; reject such points
    # exactly as if they had failed the priors.
    if np.isnan(ln_like):
        return -np.inf
    return prior + ln_like
# Initialize the model: a point-source point-lens (PSPL) model with three
# free parameters. The order here must match the theta vectors that emcee
# passes into lnprob/lnlike/lnprior above.
parameters_to_fit = ["t_0", "u_0", "t_E"]
parameters_values = [2457500., 0.5, 100.]
# Per-parameter scale of the random scatter used to seed the walkers.
parameters_steps = [1., 0.01, 1.]
model = mm.Model(
    {'t_0': parameters_values[0], 'u_0': parameters_values[1],
     't_E': parameters_values[2]})
print("Initial", model.parameters)

# Read in the data (OB08092 photometry bundled with MulensModel).
file_ = os.path.join(mm.DATA_PATH, "photometry_files",
                     "OB08092", "phot_ob08092_O4.dat")
data = mm.MulensData(file_name=file_)

# Set up the Event tying the dataset to the model.
event = mm.Event(datasets=data, model=model)

# Baseline chi2 = # of data points; subtracted inside lnlike().
chi2_0 = len(data.time) * 1.

# Initializations for emcee.
ndim = len(parameters_values)
nwalkers = 100
nsteps = 500
burn = 50  # leading steps discarded as burn-in
# Start the walkers in a small Gaussian ball around the initial guess.
start = [parameters_values + np.random.randn(ndim) * parameters_steps
         for i in range(nwalkers)]
sampler = emcee.EnsembleSampler(
    nwalkers, ndim, lnprob, args=(event, parameters_to_fit))

# Run emcee - Fails because tries to set negative t_E.
# Verbose option to diagnose?
sampler.run_mcmc(start, nsteps)

# Flatten the post-burn-in chain and report median +1sigma/-1sigma
# (16/50/84 percentiles) per parameter.
samples = sampler.chain[:, burn:, :].reshape((-1, ndim))
results = map(
    lambda v: (v[1], v[2]-v[1], v[1]-v[0]),
    zip(*np.percentile(samples, [16, 50, 84], axis=0)))

# Output fit
for r in results:
    print(*r)
|
chpatel3/coherence-cpp-extend-client | src/coherence/util/Event.cpp | <gh_stars>1-10
/*
* Copyright (c) 2000, 2020, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* http://oss.oracle.com/licenses/upl.
*/
#include "coherence/util/Event.hpp"
COH_OPEN_NAMESPACE2(coherence,util)
// ----- constructors -------------------------------------------------------

// Construct an Event for the given source object; the view is stored in
// the f_vSource member (bound to this instance via self()).
Event::Event(Object::View vSource)
    : f_vSource(self(), vSource)
    {
    }


// ----- Object interface ---------------------------------------------------

// Human-readable form: "Event{<source>}".
TypedHandle<const String> Event::toString() const
    {
    return COH_TO_STRING("Event{" << getSource() << '}');
    }


// ----- accessors ----------------------------------------------------------

// Returns the object on which this event initially occurred.
Object::View Event::getSource() const
    {
    return f_vSource;
    }
COH_CLOSE_NAMESPACE2
|
nphkh/fluid-framework | FluidFrameworkAndroid/src/com/sponberg/fluid/android/util/DeviceUtil.java | package com.sponberg.fluid.android.util;
import android.os.Build;
public class DeviceUtil {
public static boolean isEmulator() {
return Build.BRAND.equals("generic");
}
}
|
jayjlawrence/prawn | manual/text/registering_families.rb | <filename>manual/text/registering_families.rb
# encoding: utf-8
#
# Registering font families will help you when you want to use a font over and
# over or if you would like to take advantage of the <code>:style</code> option
# of the text methods and the <code>b</code> and <code>i</code> tags when using
# inline formatting.
#
# To register a font family update the <code>font_families</code>
# hash with the font path for each style you want to use.
#
# Resolve ../example_helper relative to this manual page so it runs from
# any working directory.
require File.expand_path(File.join(File.dirname(__FILE__),
                                   %w[.. example_helper]))

# The output PDF is named after this source file.
filename = File.basename(__FILE__).gsub('.rb', '.pdf')
Prawn::Example.generate(filename) do
  # Registering a single TTF font: only a :normal face, so no styles.
  font_families.update("Chalkboard" => {
    :normal => "#{Prawn::DATADIR}/fonts/Chalkboard.ttf"
  })

  font("Chalkboard") do
    text "Using the Chalkboard font providing only its name to the font method"
  end
  move_down 20

  # Registering a DFONT package: one file provides all four faces,
  # selected via the :font key inside each style entry.
  font_path = "#{Prawn::DATADIR}/fonts/Action Man.dfont"
  font_families.update("Action Man" => {
    :normal => { :file => font_path, :font => "ActionMan" },
    :italic => { :file => font_path, :font => "ActionMan-Italic" },
    :bold => { :file => font_path, :font => "ActionMan-Bold" },
    :bold_italic => { :file => font_path, :font => "ActionMan-BoldItalic" }
  })

  font "Action Man"
  text "Also using the Action Man by providing only its name"
  move_down 20

  # <b>/<i> inline tags work because bold/italic faces are registered.
  text "Taking <b>advantage</b> of the <i>inline formatting</i>",
       :inline_format => true
  move_down 20

  # Exercise each registered face through the :style option.
  [:bold, :bold_italic, :italic, :normal].each do |style|
    text "Using the #{style} style option.",
         :style => style
    move_down 10
  end
end
|
unverbraucht/cybergarage-upnp | tools/igd-tool/src/main/java/UpnpIGDToolAddPortDlg.java | <reponame>unverbraucht/cybergarage-upnp
/******************************************************************
*
* CyberUPnP for Java
*
* Copyright (C) <NAME> 2002
*
* File : ActionDialog.java
*
******************************************************************/
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import javax.swing.*;
import org.cybergarage.upnp.*;
import org.cybergarage.upnp.Action;
/**
 * Modal dialog collecting the parameters of a new IGD port mapping:
 * mapping name, protocol (TCP/UDP), external (WAN) port, and internal
 * (LAN) IP/port. Use {@link #doModal()} to show it and learn whether the
 * user confirmed, then read the values through the getters.
 */
public class UpnpIGDToolAddPortDlg extends JDialog implements ActionListener
{
	/** Target gateway device (stored by the constructor; not otherwise used here). */
	private Device igdDevice;

	private JButton okButton;
	private JButton cancelButton;
	/** true when the dialog was closed via OK, false for Cancel. */
	private boolean result;
	private ArgumentList inArgList;
	private Vector inArgFieldList;
	private JTextField nameLabel;
	private JComboBox protoLabel;
	private JTextField wanPortLabel;
	private JTextField lanIpLabel;
	private JTextField lanPortLabel;

	public UpnpIGDToolAddPortDlg(Frame frame, Device dev)
	{
		super(frame, true);
		getContentPane().setLayout(new BorderLayout());

		inArgList = new ArgumentList();
		inArgFieldList = new Vector();

		igdDevice = dev;

		// Two-column grid: static caption on the left, input on the right.
		JPanel argListPane = new JPanel();
		argListPane.setLayout(new GridLayout(0, 2));
		getContentPane().add(argListPane, BorderLayout.CENTER);

		JLabel staticLabel;

		staticLabel = new JLabel("Name");
		nameLabel = new JTextField();
		argListPane.add(staticLabel);
		argListPane.add(nameLabel);

		staticLabel = new JLabel("Protocol");
		protoLabel = new JComboBox();
		protoLabel.addItem("TCP");
		protoLabel.addItem("UDP");
		argListPane.add(staticLabel);
		argListPane.add(protoLabel);

		staticLabel = new JLabel("WAN port");
		wanPortLabel = new JTextField();
		argListPane.add(staticLabel);
		argListPane.add(wanPortLabel);

		staticLabel = new JLabel("LAN IP");
		lanIpLabel = new JTextField();
		argListPane.add(staticLabel);
		argListPane.add(lanIpLabel);

		staticLabel = new JLabel("LAN port");
		lanPortLabel = new JTextField();
		argListPane.add(staticLabel);
		argListPane.add(lanPortLabel);

		okButton = new JButton("OK");
		okButton.addActionListener(this);
		cancelButton = new JButton("Cancel");
		cancelButton.addActionListener(this);
		JPanel buttonPane = new JPanel();
		buttonPane.add(okButton);
		buttonPane.add(cancelButton);
		getContentPane().add(buttonPane, BorderLayout.SOUTH);

		pack();

		// Center the dialog over the parent frame.
		Dimension size = getSize();
		Point fpos = frame.getLocationOnScreen();
		Dimension fsize = frame.getSize();
		setLocation(fpos.x + (fsize.width - size.width)/2, fpos.y + (fsize.height - size.height)/2);
	}

	////////////////////////////////////////////////
	// Arguments
	////////////////////////////////////////////////

	/**
	 * @return the mapping description entered by the user.
	 *         NOTE: shadows java.awt.Component#getName().
	 */
	public String getName()
	{
		return nameLabel.getText();
	}

	/** @return the selected protocol, "TCP" or "UDP". */
	public String getProtocol()
	{
		return (String)protoLabel.getSelectedItem();
	}

	/** @return the external (WAN) port, or 0 when the field is not a number. */
	public int getExternalPort()
	{
		try {
			String wanPortStr = wanPortLabel.getText();
			return Integer.parseInt(wanPortStr);
		}
		catch (Exception e) {
		}
		return 0;
	}

	/** @return the internal (LAN) host address as typed. */
	public String getInternalIP()
	{
		return lanIpLabel.getText();
	}

	/** @return the internal (LAN) port, or 0 when the field is not a number. */
	public int getInternalPort()
	{
		try {
			// BUGFIX: previously read wanPortLabel (copy-paste error), so the
			// internal port always echoed the external one. Read the LAN field.
			String lanPortStr = lanPortLabel.getText();
			return Integer.parseInt(lanPortStr);
		}
		catch (Exception e) {
		}
		return 0;
	}

	////////////////////////////////////////////////
	// actionPerformed
	////////////////////////////////////////////////

	/** Closes the dialog, remembering whether OK or Cancel dismissed it. */
	public void actionPerformed(ActionEvent e)
	{
		if (e.getSource() == okButton) {
			result = true;
		}
		if (e.getSource() == cancelButton) {
			result = false;
		}
		dispose();
	}

	////////////////////////////////////////////////
	// doModal
	////////////////////////////////////////////////

	/**
	 * Shows the modal dialog and blocks until it is dismissed.
	 *
	 * @return true when confirmed with OK, false when cancelled.
	 */
	public boolean doModal()
	{
		show();
		return result;
	}
}
|
tbcole/majoranaJJ | demos/dense_op/bands/H0/square.py | <filename>demos/dense_op/bands/H0/square.py
import numpy as np
import matplotlib.pyplot as plt
from numpy import linalg as LA
import majoranas.modules.constants as const
import majoranas.modules.lattice as lat
import majoranas.modules.operators as op
import majoranas.modules.alt_mod.altoperators as aop
print("hbar = {} [J*s]".format(const.hbarJ))
print("hbar = {} [ev*s]".format(const.hbar))
print("mass of electron = {} [kg]".format(const.m0))
print("hbar**2/m0 = {} [eV A^2]".format(const.xi))
ax = 2 #atomic spacing along x-direction in [A]
ay = 2 #atomic spacing along y-direction in [A]
Nx = 3 #number of lattice sites in x direction
Ny = 3 #number of lattice sites in y direction
N = Ny*Nx #Total number of lattice sites
coor = lat.square(Nx, Ny) #square coordinate array
NN = lat.NN_Arr(coor) #nearest neighbor array of square lattice
NNb = lat.NN_Bound(NN, coor) #periodic NN array
Lx = (max(coor[:, 0]) - min(coor[:, 0]) + 1)*ax #Unit cell size in x-direction
Ly = (max(coor[:, 1]) - min(coor[:, 1]) + 1 )*ay #Unit cell size in y-direction
tx = -const.xi/(ax**2) #Hopping in [eV]
ty = -const.xi/(ay**2) #Hopping in [eV]
print("Lx", Lx)
print("Number of Lattice Sites= ", N)
print("Unit cell size in x-direction = {} [A] = {} [m]".format(Lx, Lx*1e-10))
print("Unit cell size in y-direction = {} [A] = {} [m]".format(Ly, Ly*1e-10))
print("Hopping Parameter tx = {} [ev]".format(tx))
print("Hopping Parameter ty = {} [ev]".format(ty))
#H0k(qx, qy, coor, ax, ay)
steps = 50
nbands = N
qx = np.linspace(-np.pi/Lx, np.pi/Lx, steps)
qy = np.linspace(-np.pi/Ly, np.pi/Ly, steps)
eigarr = np.zeros((steps, nbands))
for i in range(steps):
eigarr[i, :] = LA.eigh(aop.H0k(coor, ax, ay, qx[i], 0))[0][:nbands]
op.bands(eigarr, qx, Lx, Ly, title = 'original FP')
#H_SOk(coor, ax, ay, qx, qy, V, gamma, alpha)
#V_periodic(V0, Nx, Ny, coor)
alpha = 0.2 #[eV*A]
gamma = 0*0.01 #[T]
V0 = 0.0
V = op.V_periodic(V0, coor)
nbands = 2*N
qx = np.linspace(-np.pi/Lx, np.pi/Lx, steps)
qy = np.linspace(-np.pi/Ly, np.pi/Ly, steps)
eigarr = np.zeros((steps, nbands))
for i in range(steps):
eigarr[i, :] = LA.eigh(aop.H_SOk(coor, ax, ay, qx[i], 0, 0, gamma, alpha))[0][:nbands]
op.bands(eigarr, qx, Lx, Ly, title = " original SOC")
# H0(coor, ax, ay, potential = 0, gammax = 0, gammay = 0, gammaz = 0,
# alpha = 0, qx = 0, qy = 0,
# periodic = 'yes'
# ):
#V_periodic(V0, Nx, Ny, coor)
a = 0.2 #[eV*A]
gamma = 0*0.01 #[T]
V0 = 0
nbands = 2*N
qx = np.linspace(-np.pi/Lx, np.pi/Lx, steps)
qy = np.linspace(-np.pi/Ly, np.pi/Ly, steps)
V = op.V_periodic(V0, coor)
eigarrsoc = np.zeros((steps, nbands))
eigarrfp = np.zeros((steps, nbands))
for i in range(steps):
eigarrsoc[i, :] = LA.eigh(op.H0(coor, ax, ay, alpha = a, gammaz = gamma, potential = V, qx = qx[i]))[0][:nbands]
for i in range(steps):
eigarrfp[i, :] = LA.eigh(op.H0(coor, ax, ay, qx = qx[i]))[0][:nbands]
op.bands(eigarrsoc, qx, Lx, Ly, title = 'New SOC: alpha = {}, gammaz = {}, Potential = {}, qy = 0'.format(a, gamma, V0))
op.bands(eigarrfp, qx, Lx, Ly, title = 'New FP: alpha = {}, gammaz = {}, Potential = {}, qy = 0'.format(0, 0, 0))
|
ceekay1991/AliPayForDebug | AliPayForDebug/AliPayForDebug/AlipayWallet_Headers/AlipayLifeEventManager.h | //
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import <objc/NSObject.h>
@class NSMutableArray;

// class-dump reconstruction of Alipay's life-event manager: declarations
// only, no implementations. The notes below are inferred from the names
// alone — verify against the binary before relying on them.
@interface AlipayLifeEventManager : NSObject
{
    NSMutableArray *_arrayRecallObservers;  // observers for "recall" notifications
}

// getInstance/allocWithZone/copyWithZone together suggest the usual
// enforced-singleton pattern — TODO confirm.
+ (void)unregisterRecallNotification:(id)arg1;
+ (void)registerRecallNotification:(id)arg1;
+ (id)allocWithZone:(struct _NSZone *)arg1;
+ (id)getInstance;

@property(retain, nonatomic) NSMutableArray *arrayRecallObservers; // @synthesize arrayRecallObservers=_arrayRecallObservers;

- (void).cxx_destruct;
- (void)recallArticleResult:(id)arg1;
- (void)AlipayLifeAction:(id)arg1;
- (void)unregisterAlipayLifeNotification;
- (void)registerAlipayLifeNotification;
- (id)copyWithZone:(struct _NSZone *)arg1;

@end
|
szemyxz/Filmbox | src/main/java/pl/filmbox/repositories/implementations/UserRepositoryImpl.java | package pl.filmbox.repositories.implementations;
import org.springframework.stereotype.Repository;
import pl.filmbox.models.User;
import pl.filmbox.repositories.UserRepository;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import java.util.List;
@Repository
public class UserRepositoryImpl implements UserRepository {
@PersistenceContext
private EntityManager entityManager;
@Override
public User addAndUpdateUser(User user) {
return entityManager.merge(user);
}
@Override
public User getUser(Long userId) {
return entityManager.find(User.class, userId);
}
@Override
public User getUserByUsername(String username) {
User user = null;
try{
TypedQuery<User> typedQuery = entityManager.createQuery(
"SELECT u FROM "+ User.class.getSimpleName() +" u where u.username = :username",
User.class
);
typedQuery.setParameter("username", username);
user = typedQuery.getSingleResult();
}
catch (NoResultException nre) {
//do nothing
}
return user;
}
@Override
public User getUserByEmail(String email) {
User user = null;
try{
TypedQuery<User> typedQuery = entityManager.createQuery(
"SELECT u FROM "+ User.class.getSimpleName() +" u where u.email = :email",
User.class
);
typedQuery.setParameter("email", email);
user = typedQuery.getSingleResult();
}
catch (NoResultException nre) {
//do nothing
}
return user;
}
@Override
public List<User> getAllUsers() {
TypedQuery<User> typedQuery = entityManager.createQuery(
"SELECT u from " + User.class.getSimpleName() + " u",
User.class
);
return typedQuery.getResultList();
}
@Override
public void deleteUser(User user) {
entityManager.remove(
entityManager.contains(user) ? user : entityManager.merge(user)
);
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.